Compare commits
4 Commits
settings-d
...
static-rel
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f6946ad4e8 | ||
|
|
c9972c2972 | ||
|
|
afdc53fdb7 | ||
|
|
d2e5947cf3 |
35
.github/ISSUE_TEMPLATE/07_bug_windows.yml
vendored
35
.github/ISSUE_TEMPLATE/07_bug_windows.yml
vendored
@@ -1,35 +0,0 @@
|
||||
name: Bug Report (Windows)
|
||||
description: Zed Windows Related Bugs
|
||||
type: "Bug"
|
||||
labels: ["windows"]
|
||||
title: "Windows: <a short description of the Windows bug>"
|
||||
body:
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Summary
|
||||
description: Describe the bug with a one-line summary, and provide detailed reproduction steps
|
||||
value: |
|
||||
<!-- Please insert a one-line summary of the issue below -->
|
||||
SUMMARY_SENTENCE_HERE
|
||||
|
||||
### Description
|
||||
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
|
||||
Steps to trigger the problem:
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
|
||||
**Expected Behavior**:
|
||||
**Actual Behavior**:
|
||||
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: environment
|
||||
attributes:
|
||||
label: Zed Version and System Specs
|
||||
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
|
||||
placeholder: |
|
||||
Output of "zed: copy system specs into clipboard"
|
||||
validations:
|
||||
required: true
|
||||
2
.github/workflows/ci.yml
vendored
2
.github/workflows/ci.yml
vendored
@@ -866,7 +866,7 @@ jobs:
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
|
||||
name: ZedEditorUserSetup-x64-${{ github.event.pull_request.head.sha || github.sha }}.exe
|
||||
path: ${{ env.SETUP_PATH }}
|
||||
|
||||
- name: Upload Artifacts to release
|
||||
|
||||
272
Cargo.lock
generated
272
Cargo.lock
generated
@@ -570,12 +570,12 @@ checksum = "e9d4ee0d472d1cd2e28c97dfa124b3d8d992e10eb0a035f33f5d12e3a177ba3b"
|
||||
|
||||
[[package]]
|
||||
name = "ammonia"
|
||||
version = "4.1.2"
|
||||
version = "4.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "17e913097e1a2124b46746c980134e8c954bc17a6a59bb3fde96f088d126dde6"
|
||||
checksum = "3ada2ee439075a3e70b6992fce18ac4e407cd05aea9ca3f75d2c0b0c20bbb364"
|
||||
dependencies = [
|
||||
"cssparser",
|
||||
"html5ever 0.35.0",
|
||||
"html5ever 0.31.0",
|
||||
"maplit",
|
||||
"tendril",
|
||||
"url",
|
||||
@@ -778,9 +778,6 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_repr",
|
||||
"url",
|
||||
"wayland-backend",
|
||||
"wayland-client",
|
||||
"wayland-protocols 0.32.6",
|
||||
"zbus",
|
||||
]
|
||||
|
||||
@@ -2413,20 +2410,6 @@ dependencies = [
|
||||
"piper",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bm25"
|
||||
version = "2.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1cbd8ffdfb7b4c2ff038726178a780a94f90525ed0ad264c0afaa75dd8c18a64"
|
||||
dependencies = [
|
||||
"cached",
|
||||
"deunicode",
|
||||
"fxhash",
|
||||
"rust-stemmers",
|
||||
"stop-words",
|
||||
"unicode-segmentation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "borrow-or-share"
|
||||
version = "0.2.2"
|
||||
@@ -2633,39 +2616,6 @@ dependencies = [
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cached"
|
||||
version = "0.56.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "801927ee168e17809ab8901d9f01f700cd7d8d6a6527997fee44e4b0327a253c"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"cached_proc_macro",
|
||||
"cached_proc_macro_types",
|
||||
"hashbrown 0.15.3",
|
||||
"once_cell",
|
||||
"thiserror 2.0.12",
|
||||
"web-time",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cached_proc_macro"
|
||||
version = "0.25.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9225bdcf4e4a9a4c08bf16607908eb2fbf746828d5e0b5e019726dbf6571f201"
|
||||
dependencies = [
|
||||
"darling",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.101",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cached_proc_macro_types"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ade8366b8bd5ba243f0a58f036cc0ca8a2f069cff1a2351ef1cac6b083e16fc0"
|
||||
|
||||
[[package]]
|
||||
name = "call"
|
||||
version = "0.1.0"
|
||||
@@ -3363,27 +3313,6 @@ dependencies = [
|
||||
"unicode-width",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codestral"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"edit_prediction",
|
||||
"edit_prediction_context",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"language",
|
||||
"language_models",
|
||||
"log",
|
||||
"mistral",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"smol",
|
||||
"text",
|
||||
"workspace-hack",
|
||||
"zed-http-client",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "collab"
|
||||
version = "0.44.0"
|
||||
@@ -4722,7 +4651,7 @@ dependencies = [
|
||||
"serde_json_lenient",
|
||||
"settings",
|
||||
"shlex",
|
||||
"sysinfo 0.37.2",
|
||||
"sysinfo",
|
||||
"task",
|
||||
"tasks_ui",
|
||||
"telemetry",
|
||||
@@ -4839,12 +4768,6 @@ dependencies = [
|
||||
"syn 2.0.101",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deunicode"
|
||||
version = "1.6.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "abd57806937c9cc163efc8ea3910e00a62e2aeb0b8119f1793a978088f8f6b04"
|
||||
|
||||
[[package]]
|
||||
name = "diagnostics"
|
||||
version = "0.1.0"
|
||||
@@ -5028,8 +4951,6 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"task",
|
||||
"theme",
|
||||
"workspace-hack",
|
||||
"zed",
|
||||
"zed-util",
|
||||
@@ -5191,7 +5112,6 @@ dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"cloud_llm_client",
|
||||
"codestral",
|
||||
"copilot",
|
||||
"edit_prediction",
|
||||
"editor",
|
||||
@@ -5244,9 +5164,6 @@ dependencies = [
|
||||
"strum 0.27.1",
|
||||
"text",
|
||||
"tree-sitter",
|
||||
"tree-sitter-c",
|
||||
"tree-sitter-cpp",
|
||||
"tree-sitter-go",
|
||||
"workspace-hack",
|
||||
"zed-collections",
|
||||
"zed-util",
|
||||
@@ -5944,7 +5861,9 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"editor",
|
||||
"gpui",
|
||||
"menu",
|
||||
"system_specs",
|
||||
"ui",
|
||||
"urlencoding",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
@@ -6498,15 +6417,6 @@ dependencies = [
|
||||
"thread_local",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fxhash"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gemm"
|
||||
version = "0.17.1"
|
||||
@@ -6837,7 +6747,6 @@ dependencies = [
|
||||
"futures 0.3.31",
|
||||
"git2",
|
||||
"gpui",
|
||||
"itertools 0.14.0",
|
||||
"log",
|
||||
"parking_lot",
|
||||
"pretty_assertions",
|
||||
@@ -6854,7 +6763,6 @@ dependencies = [
|
||||
"time",
|
||||
"unindent",
|
||||
"url",
|
||||
"urlencoding",
|
||||
"uuid",
|
||||
"workspace-hack",
|
||||
"zed-collections",
|
||||
@@ -7079,7 +6987,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gpui"
|
||||
version = "0.2.0"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"as-raw-xcb-connection",
|
||||
@@ -7154,7 +7062,7 @@ dependencies = [
|
||||
"wayland-backend",
|
||||
"wayland-client",
|
||||
"wayland-cursor",
|
||||
"wayland-protocols 0.31.2",
|
||||
"wayland-protocols",
|
||||
"wayland-protocols-plasma",
|
||||
"windows 0.61.1",
|
||||
"windows-core 0.61.0",
|
||||
@@ -7517,12 +7425,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "html5ever"
|
||||
version = "0.35.0"
|
||||
version = "0.31.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "55d958c2f74b664487a2035fe1dadb032c48718a03b63f3ab0b8537db8549ed4"
|
||||
checksum = "953cbbe631aae7fc0a112702ad5d3aaf09da38beaf45ea84610d6e1c358f569c"
|
||||
dependencies = [
|
||||
"log",
|
||||
"markup5ever 0.35.0",
|
||||
"mac",
|
||||
"markup5ever 0.16.1",
|
||||
"match_token",
|
||||
]
|
||||
|
||||
@@ -8114,7 +8023,6 @@ dependencies = [
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"theme",
|
||||
"title_bar",
|
||||
"ui",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
@@ -8867,6 +8775,7 @@ dependencies = [
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"settings",
|
||||
"shlex",
|
||||
"smol",
|
||||
"task",
|
||||
"text",
|
||||
@@ -8949,9 +8858,9 @@ checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.176"
|
||||
version = "0.2.172"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174"
|
||||
checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
|
||||
|
||||
[[package]]
|
||||
name = "libdbus-sys"
|
||||
@@ -8992,7 +8901,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"windows-targets 0.48.5",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -9526,9 +9435,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "markup5ever"
|
||||
version = "0.35.0"
|
||||
version = "0.16.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "311fe69c934650f8f19652b3946075f0fc41ad8757dbb68f1ca14e7900ecc1c3"
|
||||
checksum = "d0a8096766c229e8c88a3900c9b44b7e06aa7f7343cc229158c3e58ef8f9973a"
|
||||
dependencies = [
|
||||
"log",
|
||||
"tendril",
|
||||
@@ -9549,9 +9458,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "match_token"
|
||||
version = "0.35.0"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac84fd3f360fcc43dc5f5d186f02a94192761a080e8bc58621ad4d12296a58cf"
|
||||
checksum = "88a9689d8d44bf9964484516275f5cd4c9b59457a6940c1d5d0ecbb94510a36b"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -10535,16 +10444,6 @@ dependencies = [
|
||||
"objc2-core-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-io-kit"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "71c1c64d6120e51cd86033f67176b1cb66780c2efe34dec55176f77befd93c0a"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"objc2-core-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-metal"
|
||||
version = "0.3.1"
|
||||
@@ -10633,15 +10532,20 @@ dependencies = [
|
||||
name = "onboarding"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"ai_onboarding",
|
||||
"anyhow",
|
||||
"client",
|
||||
"component",
|
||||
"db",
|
||||
"documented",
|
||||
"editor",
|
||||
"fs",
|
||||
"fuzzy",
|
||||
"git",
|
||||
"gpui",
|
||||
"itertools 0.14.0",
|
||||
"language",
|
||||
"language_model",
|
||||
"menu",
|
||||
"notifications",
|
||||
"picker",
|
||||
@@ -12105,6 +12009,7 @@ dependencies = [
|
||||
"dap_adapters",
|
||||
"extension",
|
||||
"fancy-regex 0.14.0",
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"fuzzy",
|
||||
@@ -13086,8 +12991,7 @@ dependencies = [
|
||||
"settings",
|
||||
"shellexpand 2.1.2",
|
||||
"smol",
|
||||
"sysinfo 0.37.2",
|
||||
"task",
|
||||
"sysinfo",
|
||||
"thiserror 2.0.12",
|
||||
"toml 0.8.20",
|
||||
"unindent",
|
||||
@@ -13550,16 +13454,6 @@ dependencies = [
|
||||
"walkdir",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rust-stemmers"
|
||||
version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e46a2036019fdb888131db7a4c847a1063a7493f971ed94ea82c67eada63ca54"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rust_decimal"
|
||||
version = "1.38.0"
|
||||
@@ -14464,30 +14358,25 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"assets",
|
||||
"bm25",
|
||||
"client",
|
||||
"command_palette_hooks",
|
||||
"editor",
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
"heck 0.5.0",
|
||||
"language",
|
||||
"log",
|
||||
"menu",
|
||||
"node_runtime",
|
||||
"paths",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"schemars 1.0.1",
|
||||
"search",
|
||||
"serde",
|
||||
"session",
|
||||
"settings",
|
||||
"strum 0.27.1",
|
||||
"theme",
|
||||
"title_bar",
|
||||
"ui",
|
||||
"ui_input",
|
||||
"workspace",
|
||||
@@ -15157,15 +15046,6 @@ version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
|
||||
|
||||
[[package]]
|
||||
name = "stop-words"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "645a3d441ccf4bf47f2e4b7681461986681a6eeea9937d4c3bc9febd61d17c71"
|
||||
dependencies = [
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "story"
|
||||
version = "0.1.0"
|
||||
@@ -15746,21 +15626,7 @@ dependencies = [
|
||||
"memchr",
|
||||
"ntapi",
|
||||
"rayon",
|
||||
"windows 0.54.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sysinfo"
|
||||
version = "0.37.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "16607d5caffd1c07ce073528f9ed972d88db15dd44023fa57142963be3feb11f"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"memchr",
|
||||
"ntapi",
|
||||
"objc2-core-foundation",
|
||||
"objc2-io-kit",
|
||||
"windows 0.61.1",
|
||||
"windows 0.57.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -15845,7 +15711,7 @@ dependencies = [
|
||||
"pciid-parser",
|
||||
"release_channel",
|
||||
"serde",
|
||||
"sysinfo 0.37.2",
|
||||
"sysinfo",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
@@ -16053,7 +15919,7 @@ dependencies = [
|
||||
"serde",
|
||||
"settings",
|
||||
"smol",
|
||||
"sysinfo 0.37.2",
|
||||
"sysinfo",
|
||||
"task",
|
||||
"theme",
|
||||
"thiserror 2.0.12",
|
||||
@@ -17071,7 +16937,8 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "tree-sitter-typescript"
|
||||
version = "0.23.2"
|
||||
source = "git+https://github.com/zed-industries/tree-sitter-typescript?rev=e2c53597d6a5d9cf7bbe8dccde576fe1e46c5899#e2c53597d6a5d9cf7bbe8dccde576fe1e46c5899"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6c5f76ed8d947a75cc446d5fccd8b602ebf0cde64ccf2ffa434d873d7a575eff"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter-language",
|
||||
@@ -18300,18 +18167,6 @@ dependencies = [
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-protocols"
|
||||
version = "0.32.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0781cf46869b37e36928f7b432273c0995aa8aed9552c556fb18754420541efc"
|
||||
dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"wayland-backend",
|
||||
"wayland-client",
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-protocols-plasma"
|
||||
version = "0.2.0"
|
||||
@@ -18321,7 +18176,7 @@ dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"wayland-backend",
|
||||
"wayland-client",
|
||||
"wayland-protocols 0.31.2",
|
||||
"wayland-protocols",
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
@@ -18594,6 +18449,16 @@ dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows"
|
||||
version = "0.57.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143"
|
||||
dependencies = [
|
||||
"windows-core 0.57.0",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows"
|
||||
version = "0.58.0"
|
||||
@@ -18650,6 +18515,18 @@ dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-core"
|
||||
version = "0.57.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d"
|
||||
dependencies = [
|
||||
"windows-implement 0.57.0",
|
||||
"windows-interface 0.57.0",
|
||||
"windows-result 0.1.2",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-core"
|
||||
version = "0.58.0"
|
||||
@@ -18686,6 +18563,17 @@ dependencies = [
|
||||
"windows-link 0.1.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-implement"
|
||||
version = "0.57.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.101",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-implement"
|
||||
version = "0.58.0"
|
||||
@@ -18708,6 +18596,17 @@ dependencies = [
|
||||
"syn 2.0.101",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-interface"
|
||||
version = "0.57.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.101",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-interface"
|
||||
version = "0.58.0"
|
||||
@@ -19526,7 +19425,6 @@ dependencies = [
|
||||
"aho-corasick",
|
||||
"anstream",
|
||||
"arrayvec",
|
||||
"ashpd 0.11.0",
|
||||
"async-compression",
|
||||
"async-std",
|
||||
"async-tungstenite",
|
||||
@@ -19699,17 +19597,13 @@ dependencies = [
|
||||
"wasmtime",
|
||||
"wasmtime-cranelift",
|
||||
"wasmtime-environ",
|
||||
"wayland-backend",
|
||||
"wayland-sys",
|
||||
"winapi",
|
||||
"windows 0.61.1",
|
||||
"windows-core 0.61.0",
|
||||
"windows-numerics",
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.61.0",
|
||||
"zbus_macros",
|
||||
"zeroize",
|
||||
"zvariant",
|
||||
]
|
||||
@@ -20067,7 +19961,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.209.0"
|
||||
version = "0.208.0"
|
||||
dependencies = [
|
||||
"acp_tools",
|
||||
"activity_indicator",
|
||||
@@ -20090,7 +19984,6 @@ dependencies = [
|
||||
"clap",
|
||||
"cli",
|
||||
"client",
|
||||
"codestral",
|
||||
"collab_ui",
|
||||
"command_palette",
|
||||
"component",
|
||||
@@ -20178,7 +20071,7 @@ dependencies = [
|
||||
"snippets_ui",
|
||||
"supermaven",
|
||||
"svg_preview",
|
||||
"sysinfo 0.37.2",
|
||||
"sysinfo",
|
||||
"system_specs",
|
||||
"tab_switcher",
|
||||
"task",
|
||||
@@ -20385,7 +20278,7 @@ dependencies = [
|
||||
"rand 0.8.5",
|
||||
"screencapturekit",
|
||||
"screencapturekit-sys",
|
||||
"sysinfo 0.31.4",
|
||||
"sysinfo",
|
||||
"tao-core-video-sys",
|
||||
"windows 0.61.1",
|
||||
"windows-capture",
|
||||
@@ -20795,8 +20688,6 @@ dependencies = [
|
||||
"indoc",
|
||||
"language",
|
||||
"log",
|
||||
"multi_buffer",
|
||||
"ordered-float 2.10.1",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"serde",
|
||||
@@ -20850,7 +20741,6 @@ dependencies = [
|
||||
"terminal_view",
|
||||
"watch",
|
||||
"workspace-hack",
|
||||
"zed-collections",
|
||||
"zed-util",
|
||||
"zeta",
|
||||
"zeta2",
|
||||
|
||||
@@ -164,7 +164,6 @@ members = [
|
||||
"crates/sum_tree",
|
||||
"crates/supermaven",
|
||||
"crates/supermaven_api",
|
||||
"crates/codestral",
|
||||
"crates/svg_preview",
|
||||
"crates/system_specs",
|
||||
"crates/tab_switcher",
|
||||
@@ -399,7 +398,6 @@ streaming_diff = { path = "crates/streaming_diff" }
|
||||
sum_tree = { path = "crates/sum_tree", package = "zed-sum-tree", version = "0.1.0" }
|
||||
supermaven = { path = "crates/supermaven" }
|
||||
supermaven_api = { path = "crates/supermaven_api" }
|
||||
codestral = { path = "crates/codestral" }
|
||||
system_specs = { path = "crates/system_specs" }
|
||||
tab_switcher = { path = "crates/tab_switcher" }
|
||||
task = { path = "crates/task" }
|
||||
@@ -478,6 +476,7 @@ bitflags = "2.6.0"
|
||||
blade-graphics = { version = "0.7.0" }
|
||||
blade-macros = { version = "0.3.0" }
|
||||
blade-util = { version = "0.3.0" }
|
||||
blake3 = "1.5.3"
|
||||
bytes = "1.0"
|
||||
cargo_metadata = "0.19"
|
||||
cargo_toml = "0.21"
|
||||
@@ -654,7 +653,7 @@ strum = { version = "0.27.0", features = ["derive"] }
|
||||
subtle = "2.5.0"
|
||||
syn = { version = "2.0.101", features = ["full", "extra-traits", "visit-mut"] }
|
||||
sys-locale = "0.3.1"
|
||||
sysinfo = "0.37.0"
|
||||
sysinfo = "0.31.0"
|
||||
take-until = "0.2.0"
|
||||
tempfile = "3.20.0"
|
||||
thiserror = "2.0.12"
|
||||
@@ -693,7 +692,7 @@ tree-sitter-python = "0.25"
|
||||
tree-sitter-regex = "0.24"
|
||||
tree-sitter-ruby = "0.23"
|
||||
tree-sitter-rust = "0.24"
|
||||
tree-sitter-typescript = { git = "https://github.com/zed-industries/tree-sitter-typescript", rev = "e2c53597d6a5d9cf7bbe8dccde576fe1e46c5899" } # https://github.com/tree-sitter/tree-sitter-typescript/pull/347
|
||||
tree-sitter-typescript = "0.23"
|
||||
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" }
|
||||
unicase = "2.6"
|
||||
unicode-script = "0.5.7"
|
||||
|
||||
2
Cross.toml
Normal file
2
Cross.toml
Normal file
@@ -0,0 +1,2 @@
|
||||
[build]
|
||||
dockerfile = "Dockerfile-cross"
|
||||
17
Dockerfile-cross
Normal file
17
Dockerfile-cross
Normal file
@@ -0,0 +1,17 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
|
||||
ARG CROSS_BASE_IMAGE
|
||||
FROM ${CROSS_BASE_IMAGE}
|
||||
WORKDIR /app
|
||||
ARG TZ=Etc/UTC \
|
||||
LANG=C.UTF-8 \
|
||||
LC_ALL=C.UTF-8 \
|
||||
DEBIAN_FRONTEND=noninteractive
|
||||
ENV CARGO_TERM_COLOR=always
|
||||
|
||||
COPY script/install-mold script/
|
||||
RUN ./script/install-mold "2.34.0"
|
||||
COPY script/remote-server script/
|
||||
RUN ./script/remote-server
|
||||
|
||||
COPY . .
|
||||
@@ -1,3 +1,9 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M13.2806 4.66818L8.26042 1.76982C8.09921 1.67673 7.9003 1.67673 7.73909 1.76982L2.71918 4.66818C2.58367 4.74642 2.5 4.89112 2.5 5.04785V10.8924C2.5 11.0489 2.58367 11.1938 2.71918 11.2721L7.73934 14.1704C7.90054 14.2635 8.09946 14.2635 8.26066 14.1704L13.2808 11.2721C13.4163 11.1938 13.5 11.0491 13.5 10.8924V5.04785C13.5 4.89136 13.4163 4.74642 13.2808 4.66818H13.2806ZM12.9653 5.28212L8.11901 13.676C8.08626 13.7326 7.99977 13.7095 7.99977 13.6439V8.14771C7.99977 8.03788 7.94107 7.9363 7.84586 7.88115L3.08613 5.13317C3.02957 5.10041 3.05266 5.0139 3.11818 5.0139H12.8106C12.9483 5.0139 13.0343 5.1631 12.9655 5.28236H12.9653V5.28212Z" fill="#C4CAD4"/>
|
||||
<path opacity="0.6" d="M3.5 11V5.5L8.5 8L3.5 11Z" fill="black"/>
|
||||
<path opacity="0.4" d="M8.5 14L3.5 11L8.5 8V14Z" fill="black"/>
|
||||
<path opacity="0.6" d="M8.5 5.5H3.5L8.5 2.5L8.5 5.5Z" fill="black"/>
|
||||
<path opacity="0.8" d="M8.5 5.5V2.5L13.5 5.5H8.5Z" fill="black"/>
|
||||
<path opacity="0.2" d="M13.5 11L8.5 14L11 9.5L13.5 11Z" fill="black"/>
|
||||
<path opacity="0.5" d="M13.5 11L11 9.5L13.5 5V11Z" fill="black"/>
|
||||
<path d="M3.5 11V5L8.5 2.11325L13.5 5V11L8.5 13.8868L3.5 11Z" stroke="black"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 769 B After Width: | Height: | Size: 583 B |
@@ -30,8 +30,8 @@
|
||||
"ctrl-+": ["zed::IncreaseBufferFontSize", { "persist": false }],
|
||||
"ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }],
|
||||
"ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }],
|
||||
"ctrl-,": "zed::OpenSettings",
|
||||
"ctrl-alt-,": "zed::OpenSettingsFile",
|
||||
"ctrl-,": "zed::OpenSettingsEditor",
|
||||
"ctrl-alt-,": "zed::OpenSettings",
|
||||
"ctrl-q": "zed::Quit",
|
||||
"f4": "debugger::Start",
|
||||
"shift-f5": "debugger::Stop",
|
||||
@@ -374,6 +374,13 @@
|
||||
"ctrl-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar",
|
||||
"bindings": {
|
||||
@@ -527,15 +534,15 @@
|
||||
"ctrl-k ctrl-l": "editor::ToggleFold",
|
||||
"ctrl-k ctrl-[": "editor::FoldRecursive",
|
||||
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
|
||||
"ctrl-k ctrl-1": "editor::FoldAtLevel_1",
|
||||
"ctrl-k ctrl-2": "editor::FoldAtLevel_2",
|
||||
"ctrl-k ctrl-3": "editor::FoldAtLevel_3",
|
||||
"ctrl-k ctrl-4": "editor::FoldAtLevel_4",
|
||||
"ctrl-k ctrl-5": "editor::FoldAtLevel_5",
|
||||
"ctrl-k ctrl-6": "editor::FoldAtLevel_6",
|
||||
"ctrl-k ctrl-7": "editor::FoldAtLevel_7",
|
||||
"ctrl-k ctrl-8": "editor::FoldAtLevel_8",
|
||||
"ctrl-k ctrl-9": "editor::FoldAtLevel_9",
|
||||
"ctrl-k ctrl-1": ["editor::FoldAtLevel", 1],
|
||||
"ctrl-k ctrl-2": ["editor::FoldAtLevel", 2],
|
||||
"ctrl-k ctrl-3": ["editor::FoldAtLevel", 3],
|
||||
"ctrl-k ctrl-4": ["editor::FoldAtLevel", 4],
|
||||
"ctrl-k ctrl-5": ["editor::FoldAtLevel", 5],
|
||||
"ctrl-k ctrl-6": ["editor::FoldAtLevel", 6],
|
||||
"ctrl-k ctrl-7": ["editor::FoldAtLevel", 7],
|
||||
"ctrl-k ctrl-8": ["editor::FoldAtLevel", 8],
|
||||
"ctrl-k ctrl-9": ["editor::FoldAtLevel", 9],
|
||||
"ctrl-k ctrl-0": "editor::FoldAll",
|
||||
"ctrl-k ctrl-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
@@ -621,7 +628,7 @@
|
||||
"ctrl-shift-f": "pane::DeploySearch",
|
||||
"ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
|
||||
"ctrl-shift-t": "pane::ReopenClosedItem",
|
||||
"ctrl-k ctrl-s": "zed::OpenKeymap",
|
||||
"ctrl-k ctrl-s": "zed::OpenKeymapEditor",
|
||||
"ctrl-k ctrl-t": "theme_selector::Toggle",
|
||||
"ctrl-alt-super-p": "settings_profile_selector::Toggle",
|
||||
"ctrl-t": "project_symbols::Toggle",
|
||||
@@ -1229,6 +1236,9 @@
|
||||
"context": "Onboarding",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-1": "onboarding::ActivateBasicsPage",
|
||||
"ctrl-2": "onboarding::ActivateEditingPage",
|
||||
"ctrl-3": "onboarding::ActivateAISetupPage",
|
||||
"ctrl-enter": "onboarding::Finish",
|
||||
"alt-shift-l": "onboarding::SignIn",
|
||||
"alt-shift-a": "onboarding::OpenAccount"
|
||||
@@ -1240,44 +1250,5 @@
|
||||
"bindings": {
|
||||
"ctrl-shift-enter": "workspace::OpenWithSystem"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-w": "workspace::CloseWindow",
|
||||
"escape": "workspace::CloseWindow",
|
||||
"ctrl-m": "settings_editor::Minimize",
|
||||
"ctrl-f": "search::FocusSearch",
|
||||
"left": "settings_editor::ToggleFocusNav",
|
||||
"ctrl-shift-e": "settings_editor::ToggleFocusNav",
|
||||
// todo(settings_ui): cut this down based on the max files and overflow UI
|
||||
"ctrl-1": ["settings_editor::FocusFile", 0],
|
||||
"ctrl-2": ["settings_editor::FocusFile", 1],
|
||||
"ctrl-3": ["settings_editor::FocusFile", 2],
|
||||
"ctrl-4": ["settings_editor::FocusFile", 3],
|
||||
"ctrl-5": ["settings_editor::FocusFile", 4],
|
||||
"ctrl-6": ["settings_editor::FocusFile", 5],
|
||||
"ctrl-7": ["settings_editor::FocusFile", 6],
|
||||
"ctrl-8": ["settings_editor::FocusFile", 7],
|
||||
"ctrl-9": ["settings_editor::FocusFile", 8],
|
||||
"ctrl-0": ["settings_editor::FocusFile", 9],
|
||||
"ctrl-pageup": "settings_editor::FocusPreviousFile",
|
||||
"ctrl-pagedown": "settings_editor::FocusNextFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow > NavigationMenu",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"up": "settings_editor::FocusPreviousNavEntry",
|
||||
"down": "settings_editor::FocusNextNavEntry",
|
||||
"right": "settings_editor::ExpandNavEntry",
|
||||
"left": "settings_editor::CollapseNavEntry",
|
||||
"pageup": "settings_editor::FocusPreviousRootNavEntry",
|
||||
"pagedown": "settings_editor::FocusNextRootNavEntry",
|
||||
"home": "settings_editor::FocusFirstNavEntry",
|
||||
"end": "settings_editor::FocusLastNavEntry"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -39,8 +39,8 @@
|
||||
"cmd-+": ["zed::IncreaseBufferFontSize", { "persist": false }],
|
||||
"cmd--": ["zed::DecreaseBufferFontSize", { "persist": false }],
|
||||
"cmd-0": ["zed::ResetBufferFontSize", { "persist": false }],
|
||||
"cmd-,": "zed::OpenSettings",
|
||||
"cmd-alt-,": "zed::OpenSettingsFile",
|
||||
"cmd-,": "zed::OpenSettingsEditor",
|
||||
"cmd-alt-,": "zed::OpenSettings",
|
||||
"cmd-q": "zed::Quit",
|
||||
"cmd-h": "zed::Hide",
|
||||
"alt-cmd-h": "zed::HideOthers",
|
||||
@@ -431,6 +431,13 @@
|
||||
"cmd-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar",
|
||||
"use_key_equivalents": true,
|
||||
@@ -582,15 +589,15 @@
|
||||
"cmd-k cmd-l": "editor::ToggleFold",
|
||||
"cmd-k cmd-[": "editor::FoldRecursive",
|
||||
"cmd-k cmd-]": "editor::UnfoldRecursive",
|
||||
"cmd-k cmd-1": "editor::FoldAtLevel_1",
|
||||
"cmd-k cmd-2": "editor::FoldAtLevel_2",
|
||||
"cmd-k cmd-3": "editor::FoldAtLevel_3",
|
||||
"cmd-k cmd-4": "editor::FoldAtLevel_4",
|
||||
"cmd-k cmd-5": "editor::FoldAtLevel_5",
|
||||
"cmd-k cmd-6": "editor::FoldAtLevel_6",
|
||||
"cmd-k cmd-7": "editor::FoldAtLevel_7",
|
||||
"cmd-k cmd-8": "editor::FoldAtLevel_8",
|
||||
"cmd-k cmd-9": "editor::FoldAtLevel_9",
|
||||
"cmd-k cmd-1": ["editor::FoldAtLevel", 1],
|
||||
"cmd-k cmd-2": ["editor::FoldAtLevel", 2],
|
||||
"cmd-k cmd-3": ["editor::FoldAtLevel", 3],
|
||||
"cmd-k cmd-4": ["editor::FoldAtLevel", 4],
|
||||
"cmd-k cmd-5": ["editor::FoldAtLevel", 5],
|
||||
"cmd-k cmd-6": ["editor::FoldAtLevel", 6],
|
||||
"cmd-k cmd-7": ["editor::FoldAtLevel", 7],
|
||||
"cmd-k cmd-8": ["editor::FoldAtLevel", 8],
|
||||
"cmd-k cmd-9": ["editor::FoldAtLevel", 9],
|
||||
"cmd-k cmd-0": "editor::FoldAll",
|
||||
"cmd-k cmd-j": "editor::UnfoldAll",
|
||||
// Using `ctrl-space` / `ctrl-shift-space` in Zed requires disabling the macOS global shortcut.
|
||||
@@ -690,7 +697,7 @@
|
||||
"cmd-shift-f": "pane::DeploySearch",
|
||||
"cmd-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
|
||||
"cmd-shift-t": "pane::ReopenClosedItem",
|
||||
"cmd-k cmd-s": "zed::OpenKeymap",
|
||||
"cmd-k cmd-s": "zed::OpenKeymapEditor",
|
||||
"cmd-k cmd-t": "theme_selector::Toggle",
|
||||
"ctrl-alt-cmd-p": "settings_profile_selector::Toggle",
|
||||
"cmd-t": "project_symbols::Toggle",
|
||||
@@ -1334,7 +1341,10 @@
|
||||
"context": "Onboarding",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-enter": "onboarding::Finish",
|
||||
"cmd-1": "onboarding::ActivateBasicsPage",
|
||||
"cmd-2": "onboarding::ActivateEditingPage",
|
||||
"cmd-3": "onboarding::ActivateAISetupPage",
|
||||
"cmd-escape": "onboarding::Finish",
|
||||
"alt-tab": "onboarding::SignIn",
|
||||
"alt-shift-a": "onboarding::OpenAccount"
|
||||
}
|
||||
@@ -1345,44 +1355,5 @@
|
||||
"bindings": {
|
||||
"ctrl-shift-enter": "workspace::OpenWithSystem"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-w": "workspace::CloseWindow",
|
||||
"escape": "workspace::CloseWindow",
|
||||
"cmd-m": "settings_editor::Minimize",
|
||||
"cmd-f": "search::FocusSearch",
|
||||
"left": "settings_editor::ToggleFocusNav",
|
||||
"cmd-shift-e": "settings_editor::ToggleFocusNav",
|
||||
// todo(settings_ui): cut this down based on the max files and overflow UI
|
||||
"ctrl-1": ["settings_editor::FocusFile", 0],
|
||||
"ctrl-2": ["settings_editor::FocusFile", 1],
|
||||
"ctrl-3": ["settings_editor::FocusFile", 2],
|
||||
"ctrl-4": ["settings_editor::FocusFile", 3],
|
||||
"ctrl-5": ["settings_editor::FocusFile", 4],
|
||||
"ctrl-6": ["settings_editor::FocusFile", 5],
|
||||
"ctrl-7": ["settings_editor::FocusFile", 6],
|
||||
"ctrl-8": ["settings_editor::FocusFile", 7],
|
||||
"ctrl-9": ["settings_editor::FocusFile", 8],
|
||||
"ctrl-0": ["settings_editor::FocusFile", 9],
|
||||
"cmd-{": "settings_editor::FocusPreviousFile",
|
||||
"cmd-}": "settings_editor::FocusNextFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow > NavigationMenu",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"up": "settings_editor::FocusPreviousNavEntry",
|
||||
"down": "settings_editor::FocusNextNavEntry",
|
||||
"right": "settings_editor::ExpandNavEntry",
|
||||
"left": "settings_editor::CollapseNavEntry",
|
||||
"pageup": "settings_editor::FocusPreviousRootNavEntry",
|
||||
"pagedown": "settings_editor::FocusNextRootNavEntry",
|
||||
"home": "settings_editor::FocusFirstNavEntry",
|
||||
"end": "settings_editor::FocusLastNavEntry"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -29,8 +29,8 @@
|
||||
"ctrl-shift-=": ["zed::IncreaseBufferFontSize", { "persist": false }],
|
||||
"ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }],
|
||||
"ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }],
|
||||
"ctrl-,": "zed::OpenSettings",
|
||||
"ctrl-alt-,": "zed::OpenSettingsFile",
|
||||
"ctrl-,": "zed::OpenSettingsEditor",
|
||||
"ctrl-alt-,": "zed::OpenSettings",
|
||||
"ctrl-q": "zed::Quit",
|
||||
"f4": "debugger::Start",
|
||||
"shift-f5": "debugger::Stop",
|
||||
@@ -134,7 +134,7 @@
|
||||
"ctrl-k z": "editor::ToggleSoftWrap",
|
||||
"ctrl-f": "buffer_search::Deploy",
|
||||
"ctrl-h": "buffer_search::DeployReplace",
|
||||
"ctrl-shift-.": "agent::QuoteSelection",
|
||||
"ctrl-shift-.": "assistant::QuoteSelection",
|
||||
"ctrl-shift-,": "assistant::InsertIntoEditor",
|
||||
"shift-alt-e": "editor::SelectEnclosingSymbol",
|
||||
"ctrl-shift-backspace": "editor::GoToPreviousChange",
|
||||
@@ -244,7 +244,7 @@
|
||||
"ctrl-shift-i": "agent::ToggleOptionsMenu",
|
||||
// "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"ctrl-shift-.": "agent::QuoteSelection",
|
||||
"ctrl-shift-.": "assistant::QuoteSelection",
|
||||
"shift-alt-e": "agent::RemoveAllContext",
|
||||
"ctrl-shift-e": "project_panel::ToggleFocus",
|
||||
"ctrl-shift-enter": "agent::ContinueThread",
|
||||
@@ -383,6 +383,13 @@
|
||||
"ctrl-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar",
|
||||
"use_key_equivalents": true,
|
||||
@@ -536,15 +543,15 @@
|
||||
"ctrl-k ctrl-l": "editor::ToggleFold",
|
||||
"ctrl-k ctrl-[": "editor::FoldRecursive",
|
||||
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
|
||||
"ctrl-k ctrl-1": "editor::FoldAtLevel_1",
|
||||
"ctrl-k ctrl-2": "editor::FoldAtLevel_2",
|
||||
"ctrl-k ctrl-3": "editor::FoldAtLevel_3",
|
||||
"ctrl-k ctrl-4": "editor::FoldAtLevel_4",
|
||||
"ctrl-k ctrl-5": "editor::FoldAtLevel_5",
|
||||
"ctrl-k ctrl-6": "editor::FoldAtLevel_6",
|
||||
"ctrl-k ctrl-7": "editor::FoldAtLevel_7",
|
||||
"ctrl-k ctrl-8": "editor::FoldAtLevel_8",
|
||||
"ctrl-k ctrl-9": "editor::FoldAtLevel_9",
|
||||
"ctrl-k ctrl-1": ["editor::FoldAtLevel", 1],
|
||||
"ctrl-k ctrl-2": ["editor::FoldAtLevel", 2],
|
||||
"ctrl-k ctrl-3": ["editor::FoldAtLevel", 3],
|
||||
"ctrl-k ctrl-4": ["editor::FoldAtLevel", 4],
|
||||
"ctrl-k ctrl-5": ["editor::FoldAtLevel", 5],
|
||||
"ctrl-k ctrl-6": ["editor::FoldAtLevel", 6],
|
||||
"ctrl-k ctrl-7": ["editor::FoldAtLevel", 7],
|
||||
"ctrl-k ctrl-8": ["editor::FoldAtLevel", 8],
|
||||
"ctrl-k ctrl-9": ["editor::FoldAtLevel", 9],
|
||||
"ctrl-k ctrl-0": "editor::FoldAll",
|
||||
"ctrl-k ctrl-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
@@ -623,7 +630,7 @@
|
||||
"ctrl-shift-f": "pane::DeploySearch",
|
||||
"ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
|
||||
"ctrl-shift-t": "pane::ReopenClosedItem",
|
||||
"ctrl-k ctrl-s": "zed::OpenKeymap",
|
||||
"ctrl-k ctrl-s": "zed::OpenKeymapEditor",
|
||||
"ctrl-k ctrl-t": "theme_selector::Toggle",
|
||||
"ctrl-alt-super-p": "settings_profile_selector::Toggle",
|
||||
"ctrl-t": "project_symbols::Toggle",
|
||||
@@ -1257,48 +1264,12 @@
|
||||
"context": "Onboarding",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-1": "onboarding::ActivateBasicsPage",
|
||||
"ctrl-2": "onboarding::ActivateEditingPage",
|
||||
"ctrl-3": "onboarding::ActivateAISetupPage",
|
||||
"ctrl-enter": "onboarding::Finish",
|
||||
"alt-shift-l": "onboarding::SignIn",
|
||||
"shift-alt-a": "onboarding::OpenAccount"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-w": "workspace::CloseWindow",
|
||||
"escape": "workspace::CloseWindow",
|
||||
"ctrl-m": "settings_editor::Minimize",
|
||||
"ctrl-f": "search::FocusSearch",
|
||||
"left": "settings_editor::ToggleFocusNav",
|
||||
"ctrl-shift-e": "settings_editor::ToggleFocusNav",
|
||||
// todo(settings_ui): cut this down based on the max files and overflow UI
|
||||
"ctrl-1": ["settings_editor::FocusFile", 0],
|
||||
"ctrl-2": ["settings_editor::FocusFile", 1],
|
||||
"ctrl-3": ["settings_editor::FocusFile", 2],
|
||||
"ctrl-4": ["settings_editor::FocusFile", 3],
|
||||
"ctrl-5": ["settings_editor::FocusFile", 4],
|
||||
"ctrl-6": ["settings_editor::FocusFile", 5],
|
||||
"ctrl-7": ["settings_editor::FocusFile", 6],
|
||||
"ctrl-8": ["settings_editor::FocusFile", 7],
|
||||
"ctrl-9": ["settings_editor::FocusFile", 8],
|
||||
"ctrl-0": ["settings_editor::FocusFile", 9],
|
||||
"ctrl-pageup": "settings_editor::FocusPreviousFile",
|
||||
"ctrl-pagedown": "settings_editor::FocusNextFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow > NavigationMenu",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"up": "settings_editor::FocusPreviousNavEntry",
|
||||
"down": "settings_editor::FocusNextNavEntry",
|
||||
"right": "settings_editor::ExpandNavEntry",
|
||||
"left": "settings_editor::CollapseNavEntry",
|
||||
"pageup": "settings_editor::FocusPreviousRootNavEntry",
|
||||
"pagedown": "settings_editor::FocusNextRootNavEntry",
|
||||
"home": "settings_editor::FocusFirstNavEntry",
|
||||
"end": "settings_editor::FocusLastNavEntry"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
[
|
||||
{
|
||||
"bindings": {
|
||||
"ctrl-alt-s": "zed::OpenSettingsFile",
|
||||
"ctrl-alt-s": "zed::OpenSettings",
|
||||
"ctrl-{": "pane::ActivatePreviousItem",
|
||||
"ctrl-}": "pane::ActivateNextItem",
|
||||
"shift-escape": null, // Unmap workspace::zoom
|
||||
|
||||
@@ -580,18 +580,18 @@
|
||||
// "q": "vim::AnyQuotes",
|
||||
"q": "vim::MiniQuotes",
|
||||
"|": "vim::VerticalBars",
|
||||
"(": ["vim::Parentheses", { "opening": true }],
|
||||
"(": "vim::Parentheses",
|
||||
")": "vim::Parentheses",
|
||||
"b": "vim::Parentheses",
|
||||
// "b": "vim::AnyBrackets",
|
||||
// "b": "vim::MiniBrackets",
|
||||
"[": ["vim::SquareBrackets", { "opening": true }],
|
||||
"[": "vim::SquareBrackets",
|
||||
"]": "vim::SquareBrackets",
|
||||
"r": "vim::SquareBrackets",
|
||||
"{": ["vim::CurlyBrackets", { "opening": true }],
|
||||
"{": "vim::CurlyBrackets",
|
||||
"}": "vim::CurlyBrackets",
|
||||
"shift-b": "vim::CurlyBrackets",
|
||||
"<": ["vim::AngleBrackets", { "opening": true }],
|
||||
"<": "vim::AngleBrackets",
|
||||
">": "vim::AngleBrackets",
|
||||
"a": "vim::Argument",
|
||||
"i": "vim::IndentObj",
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"$schema": "zed://schemas/settings",
|
||||
/// The displayed name of this project. If not set or empty, the root directory name
|
||||
/// will be displayed.
|
||||
"project_name": "",
|
||||
@@ -77,7 +76,7 @@
|
||||
"ui_font_size": 16,
|
||||
// The default font size for agent responses in the agent panel. Falls back to the UI font size if unset.
|
||||
"agent_ui_font_size": null,
|
||||
// The default font size for user messages in the agent panel.
|
||||
// The default font size for user messages in the agent panel. Falls back to the buffer font size if unset.
|
||||
"agent_buffer_font_size": 12,
|
||||
// How much to fade out unused code.
|
||||
"unnecessary_code_fade": 0.3,
|
||||
@@ -722,9 +721,7 @@
|
||||
// Whether to enable drag-and-drop operations in the project panel.
|
||||
"drag_and_drop": true,
|
||||
// Whether to hide the root entry when only one folder is open in the window.
|
||||
"hide_root": false,
|
||||
// Whether to hide the hidden entries in the project panel.
|
||||
"hide_hidden": false
|
||||
"hide_root": false
|
||||
},
|
||||
"outline_panel": {
|
||||
// Whether to show the outline panel button in the status bar
|
||||
@@ -1104,7 +1101,7 @@
|
||||
// Removes any lines containing only whitespace at the end of the file and
|
||||
// ensures just one newline at the end.
|
||||
"ensure_final_newline_on_save": true,
|
||||
// Whether or not to perform a buffer format before saving: [on, off]
|
||||
// Whether or not to perform a buffer format before saving: [on, off, prettier, language_server]
|
||||
// Keep in mind, if the autosave with delay is enabled, format_on_save will be ignored
|
||||
"format_on_save": "on",
|
||||
// How to perform a buffer format. This setting can take 4 values:
|
||||
@@ -1236,8 +1233,8 @@
|
||||
"git_gutter": "tracked_files",
|
||||
/// Sets the debounce threshold (in milliseconds) after which changes are reflected in the git gutter.
|
||||
///
|
||||
/// Default: 0
|
||||
"gutter_debounce": 0,
|
||||
/// Default: null
|
||||
"gutter_debounce": null,
|
||||
// Control whether the git blame information is shown inline,
|
||||
// in the currently focused line.
|
||||
"inline_blame": {
|
||||
@@ -1314,18 +1311,15 @@
|
||||
// "proxy": "",
|
||||
// "proxy_no_verify": false
|
||||
// },
|
||||
// Whether edit predictions are enabled when editing text threads.
|
||||
// This setting has no effect if globally disabled.
|
||||
"enabled_in_text_threads": true,
|
||||
|
||||
"copilot": {
|
||||
"enterprise_uri": null,
|
||||
"proxy": null,
|
||||
"proxy_no_verify": null
|
||||
},
|
||||
"codestral": {
|
||||
"model": null,
|
||||
"max_tokens": null
|
||||
},
|
||||
// Whether edit predictions are enabled when editing text threads.
|
||||
// This setting has no effect if globally disabled.
|
||||
"enabled_in_text_threads": true
|
||||
}
|
||||
},
|
||||
// Settings specific to journaling
|
||||
"journal": {
|
||||
@@ -1407,8 +1401,8 @@
|
||||
// 4. A box drawn around the following character
|
||||
// "hollow"
|
||||
//
|
||||
// Default: "block"
|
||||
"cursor_shape": "block",
|
||||
// Default: not set, defaults to "block"
|
||||
"cursor_shape": null,
|
||||
// Set whether Alternate Scroll mode (code: ?1007) is active by default.
|
||||
// Alternate Scroll mode converts mouse scroll events into up / down key
|
||||
// presses when in the alternate screen (e.g. when running applications
|
||||
@@ -1430,8 +1424,8 @@
|
||||
// Whether or not selecting text in the terminal will automatically
|
||||
// copy to the system clipboard.
|
||||
"copy_on_select": false,
|
||||
// Whether to keep the text selection after copying it to the clipboard.
|
||||
"keep_selection_on_copy": true,
|
||||
// Whether to keep the text selection after copying it to the clipboard
|
||||
"keep_selection_on_copy": false,
|
||||
// Whether to show the terminal button in the status bar
|
||||
"button": true,
|
||||
// Any key-value pairs added to this list will be added to the terminal's
|
||||
@@ -1521,6 +1515,7 @@
|
||||
// A value of 45 preserves colorful themes while ensuring legibility.
|
||||
"minimum_contrast": 45
|
||||
},
|
||||
"code_actions_on_format": {},
|
||||
// Settings related to running tasks.
|
||||
"tasks": {
|
||||
"variables": {},
|
||||
@@ -1690,7 +1685,9 @@
|
||||
"preferred_line_length": 72
|
||||
},
|
||||
"Go": {
|
||||
"formatter": [{ "code_action": "source.organizeImports" }, { "language_server": {} }],
|
||||
"code_actions_on_format": {
|
||||
"source.organizeImports": true
|
||||
},
|
||||
"debuggers": ["Delve"]
|
||||
},
|
||||
"GraphQL": {
|
||||
@@ -2054,7 +2051,7 @@
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
"profiles": {},
|
||||
"profiles": [],
|
||||
|
||||
// A map of log scopes to the desired log level.
|
||||
// Useful for filtering out noisy logs or enabling more verbose logging.
|
||||
|
||||
@@ -9,8 +9,6 @@ disallowed-methods = [
|
||||
{ path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" },
|
||||
{ path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" },
|
||||
{ path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" },
|
||||
{ path = "serde_json::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. Use `serde_json::from_slice` instead." },
|
||||
{ path = "serde_json_lenient::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892, Use `serde_json_lenient::from_slice` instead." },
|
||||
]
|
||||
disallowed-types = [
|
||||
# { path = "std::collections::HashMap", replacement = "collections::HashMap" },
|
||||
|
||||
@@ -2112,7 +2112,6 @@ impl AcpThread {
|
||||
|
||||
let project = self.project.clone();
|
||||
let language_registry = project.read(cx).languages().clone();
|
||||
let is_windows = project.read(cx).path_style(cx).is_windows();
|
||||
|
||||
let terminal_id = acp::TerminalId(Uuid::new_v4().to_string().into());
|
||||
let terminal_task = cx.spawn({
|
||||
@@ -2126,10 +2125,9 @@ impl AcpThread {
|
||||
.and_then(|r| r.read(cx).default_system_shell())
|
||||
})?
|
||||
.unwrap_or_else(|| get_default_system_shell_preferring_bash());
|
||||
let (task_command, task_args) =
|
||||
ShellBuilder::new(&Shell::Program(shell), is_windows)
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(command.clone()), &args);
|
||||
let (task_command, task_args) = ShellBuilder::new(&Shell::Program(shell))
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(command.clone()), &args);
|
||||
let terminal = project
|
||||
.update(cx, |project, cx| {
|
||||
project.create_terminal_task(
|
||||
|
||||
@@ -20,6 +20,7 @@ use std::{
|
||||
cmp::Reverse,
|
||||
collections::HashSet,
|
||||
fmt::Write,
|
||||
path::Path,
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
@@ -327,13 +328,17 @@ impl ActivityIndicator {
|
||||
.flatten()
|
||||
}
|
||||
|
||||
fn pending_environment_error<'a>(&'a self, cx: &'a App) -> Option<&'a EnvironmentErrorMessage> {
|
||||
self.project.read(cx).peek_environment_error(cx)
|
||||
fn pending_environment_errors<'a>(
|
||||
&'a self,
|
||||
cx: &'a App,
|
||||
) -> impl Iterator<Item = (&'a Arc<Path>, &'a EnvironmentErrorMessage)> {
|
||||
self.project.read(cx).shell_environment_errors(cx)
|
||||
}
|
||||
|
||||
fn content_to_render(&mut self, cx: &mut Context<Self>) -> Option<Content> {
|
||||
// Show if any direnv calls failed
|
||||
if let Some(error) = self.pending_environment_error(cx) {
|
||||
if let Some((abs_path, error)) = self.pending_environment_errors(cx).next() {
|
||||
let abs_path = abs_path.clone();
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::Warning)
|
||||
@@ -343,7 +348,7 @@ impl ActivityIndicator {
|
||||
message: error.0.clone(),
|
||||
on_click: Some(Arc::new(move |this, window, cx| {
|
||||
this.project.update(cx, |project, cx| {
|
||||
project.pop_environment_error(cx);
|
||||
project.remove_environment_error(&abs_path, cx);
|
||||
});
|
||||
window.dispatch_action(Box::new(workspace::OpenLog), cx);
|
||||
})),
|
||||
|
||||
@@ -1276,6 +1276,62 @@ impl Thread {
|
||||
);
|
||||
}
|
||||
|
||||
pub fn retry_last_completion(
|
||||
&mut self,
|
||||
window: Option<AnyWindowHandle>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
// Clear any existing error state
|
||||
self.retry_state = None;
|
||||
|
||||
// Use the last error context if available, otherwise fall back to configured model
|
||||
let (model, intent) = if let Some((model, intent)) = self.last_error_context.take() {
|
||||
(model, intent)
|
||||
} else if let Some(configured_model) = self.configured_model.as_ref() {
|
||||
let model = configured_model.model.clone();
|
||||
let intent = if self.has_pending_tool_uses() {
|
||||
CompletionIntent::ToolResults
|
||||
} else {
|
||||
CompletionIntent::UserPrompt
|
||||
};
|
||||
(model, intent)
|
||||
} else if let Some(configured_model) = self.get_or_init_configured_model(cx) {
|
||||
let model = configured_model.model.clone();
|
||||
let intent = if self.has_pending_tool_uses() {
|
||||
CompletionIntent::ToolResults
|
||||
} else {
|
||||
CompletionIntent::UserPrompt
|
||||
};
|
||||
(model, intent)
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
|
||||
self.send_to_model(model, intent, window, cx);
|
||||
}
|
||||
|
||||
pub fn enable_burn_mode_and_retry(
|
||||
&mut self,
|
||||
window: Option<AnyWindowHandle>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.completion_mode = CompletionMode::Burn;
|
||||
cx.emit(ThreadEvent::ProfileChanged);
|
||||
self.retry_last_completion(window, cx);
|
||||
}
|
||||
|
||||
pub fn used_tools_since_last_user_message(&self) -> bool {
|
||||
for message in self.messages.iter().rev() {
|
||||
if self.tool_use.message_has_tool_results(message.id) {
|
||||
return true;
|
||||
} else if message.role == Role::User {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
pub fn to_completion_request(
|
||||
&self,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
|
||||
@@ -25,21 +25,23 @@ use std::any::Any;
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::sync::{Arc, LazyLock};
|
||||
use util::ResultExt;
|
||||
use util::rel_path::RelPath;
|
||||
|
||||
const RULES_FILE_NAMES: [&str; 9] = [
|
||||
".rules",
|
||||
".cursorrules",
|
||||
".windsurfrules",
|
||||
".clinerules",
|
||||
".github/copilot-instructions.md",
|
||||
"CLAUDE.md",
|
||||
"AGENT.md",
|
||||
"AGENTS.md",
|
||||
"GEMINI.md",
|
||||
];
|
||||
static RULES_FILE_NAMES: LazyLock<[&RelPath; 9]> = LazyLock::new(|| {
|
||||
[
|
||||
RelPath::unix(".rules").unwrap(),
|
||||
RelPath::unix(".cursorrules").unwrap(),
|
||||
RelPath::unix(".windsurfrules").unwrap(),
|
||||
RelPath::unix(".clinerules").unwrap(),
|
||||
RelPath::unix(".github/copilot-instructions.md").unwrap(),
|
||||
RelPath::unix("CLAUDE.md").unwrap(),
|
||||
RelPath::unix("AGENT.md").unwrap(),
|
||||
RelPath::unix("AGENTS.md").unwrap(),
|
||||
RelPath::unix("GEMINI.md").unwrap(),
|
||||
]
|
||||
});
|
||||
|
||||
pub struct RulesLoadingError {
|
||||
pub message: SharedString,
|
||||
@@ -475,7 +477,7 @@ impl NativeAgent {
|
||||
.into_iter()
|
||||
.filter_map(|name| {
|
||||
worktree
|
||||
.entry_for_path(RelPath::unix(name).unwrap())
|
||||
.entry_for_path(name)
|
||||
.filter(|entry| entry.is_file())
|
||||
.map(|entry| entry.path.clone())
|
||||
})
|
||||
@@ -556,11 +558,10 @@ impl NativeAgent {
|
||||
self.project_context_needs_refresh.send(()).ok();
|
||||
}
|
||||
project::Event::WorktreeUpdatedEntries(_, items) => {
|
||||
if items.iter().any(|(path, _, _)| {
|
||||
RULES_FILE_NAMES
|
||||
.iter()
|
||||
.any(|name| path.as_ref() == RelPath::unix(name).unwrap())
|
||||
}) {
|
||||
if items
|
||||
.iter()
|
||||
.any(|(path, _, _)| RULES_FILE_NAMES.iter().any(|name| path.as_ref() == *name))
|
||||
{
|
||||
self.project_context_needs_refresh.send(()).ok();
|
||||
}
|
||||
}
|
||||
@@ -1418,6 +1419,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)] // TODO: Fix this test on Windows
|
||||
async fn test_save_load_thread(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
@@ -1497,8 +1499,7 @@ mod tests {
|
||||
model.send_last_completion_stream_text_chunk("Lorem.");
|
||||
model.end_last_completion_stream();
|
||||
cx.run_until_parked();
|
||||
summary_model
|
||||
.send_last_completion_stream_text_chunk(&format!("Explaining {}", path!("/a/b.md")));
|
||||
summary_model.send_last_completion_stream_text_chunk("Explaining /a/b.md");
|
||||
summary_model.end_last_completion_stream();
|
||||
|
||||
send.await.unwrap();
|
||||
@@ -1538,7 +1539,7 @@ mod tests {
|
||||
history_entries(&history_store, cx),
|
||||
vec![(
|
||||
HistoryEntryId::AcpThread(session_id.clone()),
|
||||
format!("Explaining {}", path!("/a/b.md"))
|
||||
"Explaining /a/b.md".into()
|
||||
)]
|
||||
);
|
||||
let acp_thread = agent
|
||||
|
||||
@@ -15,11 +15,10 @@ use agent_settings::{
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::adapt_schema_to_format;
|
||||
use chrono::{DateTime, Utc};
|
||||
use client::{ModelRequestUsage, RequestUsage, UserStore};
|
||||
use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, Plan, UsageLimit};
|
||||
use client::{ModelRequestUsage, RequestUsage};
|
||||
use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit};
|
||||
use collections::{HashMap, HashSet, IndexMap};
|
||||
use fs::Fs;
|
||||
use futures::stream;
|
||||
use futures::{
|
||||
FutureExt,
|
||||
channel::{mpsc, oneshot},
|
||||
@@ -35,7 +34,7 @@ use language_model::{
|
||||
LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse,
|
||||
LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID,
|
||||
LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage,
|
||||
};
|
||||
use project::{
|
||||
Project,
|
||||
@@ -586,7 +585,6 @@ pub struct Thread {
|
||||
pending_title_generation: Option<Task<()>>,
|
||||
summary: Option<SharedString>,
|
||||
messages: Vec<Message>,
|
||||
user_store: Entity<UserStore>,
|
||||
completion_mode: CompletionMode,
|
||||
/// Holds the task that handles agent interaction until the end of the turn.
|
||||
/// Survives across multiple requests as the model performs tool calls and
|
||||
@@ -643,7 +641,6 @@ impl Thread {
|
||||
pending_title_generation: None,
|
||||
summary: None,
|
||||
messages: Vec::new(),
|
||||
user_store: project.read(cx).user_store(),
|
||||
completion_mode: AgentSettings::get_global(cx).preferred_completion_mode,
|
||||
running_turn: None,
|
||||
pending_message: None,
|
||||
@@ -823,7 +820,6 @@ impl Thread {
|
||||
pending_title_generation: None,
|
||||
summary: db_thread.detailed_summary,
|
||||
messages: db_thread.messages,
|
||||
user_store: project.read(cx).user_store(),
|
||||
completion_mode: db_thread.completion_mode.unwrap_or_default(),
|
||||
running_turn: None,
|
||||
pending_message: None,
|
||||
@@ -1253,12 +1249,12 @@ impl Thread {
|
||||
);
|
||||
|
||||
log::debug!("Calling model.stream_completion, attempt {}", attempt);
|
||||
|
||||
let (mut events, mut error) = match model.stream_completion(request, cx).await {
|
||||
Ok(events) => (events, None),
|
||||
Err(err) => (stream::empty().boxed(), Some(err)),
|
||||
};
|
||||
let mut events = model
|
||||
.stream_completion(request, cx)
|
||||
.await
|
||||
.map_err(|error| anyhow!(error))?;
|
||||
let mut tool_results = FuturesUnordered::new();
|
||||
let mut error = None;
|
||||
while let Some(event) = events.next().await {
|
||||
log::trace!("Received completion event: {:?}", event);
|
||||
match event {
|
||||
@@ -1306,10 +1302,8 @@ impl Thread {
|
||||
|
||||
if let Some(error) = error {
|
||||
attempt += 1;
|
||||
let retry = this.update(cx, |this, cx| {
|
||||
let user_store = this.user_store.read(cx);
|
||||
this.handle_completion_error(error, attempt, user_store.plan())
|
||||
})??;
|
||||
let retry =
|
||||
this.update(cx, |this, _| this.handle_completion_error(error, attempt))??;
|
||||
let timer = cx.background_executor().timer(retry.duration);
|
||||
event_stream.send_retry(retry);
|
||||
timer.await;
|
||||
@@ -1336,23 +1330,8 @@ impl Thread {
|
||||
&mut self,
|
||||
error: LanguageModelCompletionError,
|
||||
attempt: u8,
|
||||
plan: Option<Plan>,
|
||||
) -> Result<acp_thread::RetryStatus> {
|
||||
let Some(model) = self.model.as_ref() else {
|
||||
return Err(anyhow!(error));
|
||||
};
|
||||
|
||||
let auto_retry = if model.provider_id() == ZED_CLOUD_PROVIDER_ID {
|
||||
match plan {
|
||||
Some(Plan::V2(_)) => true,
|
||||
Some(Plan::V1(_)) => self.completion_mode == CompletionMode::Burn,
|
||||
None => false,
|
||||
}
|
||||
} else {
|
||||
true
|
||||
};
|
||||
|
||||
if !auto_retry {
|
||||
if self.completion_mode == CompletionMode::Normal {
|
||||
return Err(anyhow!(error));
|
||||
}
|
||||
|
||||
|
||||
@@ -835,10 +835,7 @@ impl acp::Client for ClientDelegate {
|
||||
.map(Shell::Program)
|
||||
})?
|
||||
.unwrap_or(task::Shell::System);
|
||||
let is_windows = project
|
||||
.read_with(&self.cx, |project, cx| project.path_style(cx).is_windows())
|
||||
.unwrap_or(cfg!(windows));
|
||||
let (task_command, task_args) = task::ShellBuilder::new(&shell, is_windows)
|
||||
let (task_command, task_args) = task::ShellBuilder::new(&shell)
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(args.command.clone()), &args.args);
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ use util::rel_path::RelPath;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::AgentPanel;
|
||||
use crate::acp::message_editor::MessageEditor;
|
||||
use crate::acp::message_editor::{MessageEditor, MessageEditorEvent};
|
||||
use crate::context_picker::file_context_picker::{FileMatch, search_files};
|
||||
use crate::context_picker::rules_context_picker::{RulesContextEntry, search_rules};
|
||||
use crate::context_picker::symbol_context_picker::SymbolMatch;
|
||||
@@ -759,13 +759,13 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
let editor = editor.clone();
|
||||
move |cx| {
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
.update(cx, |_editor, cx| {
|
||||
match intent {
|
||||
CompletionIntent::Complete
|
||||
| CompletionIntent::CompleteWithInsert
|
||||
| CompletionIntent::CompleteWithReplace => {
|
||||
if !is_missing_argument {
|
||||
editor.send(cx);
|
||||
cx.emit(MessageEditorEvent::Send);
|
||||
}
|
||||
}
|
||||
CompletionIntent::Compose => {}
|
||||
|
||||
@@ -141,9 +141,7 @@ impl MessageEditor {
|
||||
|
||||
subscriptions.push(cx.subscribe_in(&editor, window, {
|
||||
move |this, editor, event, window, cx| {
|
||||
if let EditorEvent::Edited { .. } = event
|
||||
&& !editor.read(cx).read_only(cx)
|
||||
{
|
||||
if let EditorEvent::Edited { .. } = event {
|
||||
let snapshot = editor.update(cx, |editor, cx| {
|
||||
let new_hints = this
|
||||
.command_hint(editor.buffer(), cx)
|
||||
@@ -825,20 +823,13 @@ impl MessageEditor {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn send(&mut self, cx: &mut Context<Self>) {
|
||||
fn send(&mut self, _: &Chat, _: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.is_empty(cx) {
|
||||
return;
|
||||
}
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.clear_inlay_hints(cx);
|
||||
});
|
||||
cx.emit(MessageEditorEvent::Send)
|
||||
}
|
||||
|
||||
fn chat(&mut self, _: &Chat, _: &mut Window, cx: &mut Context<Self>) {
|
||||
self.send(cx);
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &editor::actions::Cancel, _: &mut Window, cx: &mut Context<Self>) {
|
||||
cx.emit(MessageEditorEvent::Cancel)
|
||||
}
|
||||
@@ -1039,7 +1030,6 @@ impl MessageEditor {
|
||||
) else {
|
||||
return;
|
||||
};
|
||||
|
||||
self.editor.update(cx, |message_editor, cx| {
|
||||
message_editor.edit([(cursor_anchor..cursor_anchor, completion.new_text)], cx);
|
||||
});
|
||||
@@ -1297,7 +1287,7 @@ impl Render for MessageEditor {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
div()
|
||||
.key_context("MessageEditor")
|
||||
.on_action(cx.listener(Self::chat))
|
||||
.on_action(cx.listener(Self::send))
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.capture_action(cx.listener(Self::paste))
|
||||
.flex_1()
|
||||
|
||||
@@ -1045,37 +1045,33 @@ impl AcpThreadView {
|
||||
return;
|
||||
};
|
||||
|
||||
self.message_editor
|
||||
.update(cx, |editor, cx| editor.clear(window, cx));
|
||||
|
||||
let connection = thread.read(cx).connection().clone();
|
||||
let can_login = !connection.auth_methods().is_empty() || self.login.is_some();
|
||||
// Does the agent have a specific logout command? Prefer that in case they need to reset internal state.
|
||||
let logout_supported = text == "/logout"
|
||||
&& self
|
||||
.available_commands
|
||||
.borrow()
|
||||
.iter()
|
||||
.any(|command| command.name == "logout");
|
||||
if can_login && !logout_supported {
|
||||
let this = cx.weak_entity();
|
||||
let agent = self.agent.clone();
|
||||
window.defer(cx, |window, cx| {
|
||||
Self::handle_auth_required(
|
||||
this,
|
||||
AuthRequired {
|
||||
description: None,
|
||||
provider_id: None,
|
||||
},
|
||||
agent,
|
||||
connection,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
cx.notify();
|
||||
let auth_methods = connection.auth_methods();
|
||||
let has_supported_auth = auth_methods.iter().any(|method| {
|
||||
let id = method.id.0.as_ref();
|
||||
id == "claude-login" || id == "spawn-gemini-cli"
|
||||
});
|
||||
let can_login = has_supported_auth || auth_methods.is_empty() || self.login.is_some();
|
||||
if !can_login {
|
||||
return;
|
||||
}
|
||||
};
|
||||
let this = cx.weak_entity();
|
||||
let agent = self.agent.clone();
|
||||
window.defer(cx, |window, cx| {
|
||||
Self::handle_auth_required(
|
||||
this,
|
||||
AuthRequired {
|
||||
description: None,
|
||||
provider_id: None,
|
||||
},
|
||||
agent,
|
||||
connection,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
cx.notify();
|
||||
return;
|
||||
}
|
||||
|
||||
self.send_impl(self.message_editor.clone(), window, cx)
|
||||
@@ -2731,7 +2727,7 @@ impl AcpThreadView {
|
||||
let output_line_count = output.map(|output| output.content_line_count).unwrap_or(0);
|
||||
|
||||
let command_failed = command_finished
|
||||
&& output.is_some_and(|o| o.exit_status.is_some_and(|status| !status.success()));
|
||||
&& output.is_some_and(|o| o.exit_status.is_none_or(|status| !status.success()));
|
||||
|
||||
let time_elapsed = if let Some(output) = output {
|
||||
output.ended_at.duration_since(started_at)
|
||||
@@ -3286,12 +3282,6 @@ impl AcpThreadView {
|
||||
this.style(ButtonStyle::Outlined)
|
||||
}
|
||||
})
|
||||
.when_some(
|
||||
method.description.clone(),
|
||||
|this, description| {
|
||||
this.tooltip(Tooltip::text(description))
|
||||
},
|
||||
)
|
||||
.on_click({
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
telemetry::event!(
|
||||
@@ -4981,12 +4971,10 @@ impl AcpThreadView {
|
||||
})
|
||||
}
|
||||
|
||||
/// Inserts the selected text into the message editor or the message being
|
||||
/// edited, if any.
|
||||
pub(crate) fn insert_selections(&self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.active_editor(cx).update(cx, |editor, cx| {
|
||||
editor.insert_selections(window, cx);
|
||||
});
|
||||
self.message_editor.update(cx, |message_editor, cx| {
|
||||
message_editor.insert_selections(window, cx);
|
||||
})
|
||||
}
|
||||
|
||||
fn render_thread_retry_status_callout(
|
||||
@@ -5397,23 +5385,6 @@ impl AcpThreadView {
|
||||
};
|
||||
task.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
/// Returns the currently active editor, either for a message that is being
|
||||
/// edited or the editor for a new message.
|
||||
fn active_editor(&self, cx: &App) -> Entity<MessageEditor> {
|
||||
if let Some(index) = self.editing_message
|
||||
&& let Some(editor) = self
|
||||
.entry_view_state
|
||||
.read(cx)
|
||||
.entry(index)
|
||||
.and_then(|e| e.message_editor())
|
||||
.cloned()
|
||||
{
|
||||
editor
|
||||
} else {
|
||||
self.message_editor.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn loading_contents_spinner(size: IconSize) -> AnyElement {
|
||||
@@ -5428,7 +5399,7 @@ impl Focusable for AcpThreadView {
|
||||
fn focus_handle(&self, cx: &App) -> FocusHandle {
|
||||
match self.thread_state {
|
||||
ThreadState::Loading { .. } | ThreadState::Ready { .. } => {
|
||||
self.active_editor(cx).focus_handle(cx)
|
||||
self.message_editor.focus_handle(cx)
|
||||
}
|
||||
ThreadState::LoadError(_) | ThreadState::Unauthenticated { .. } => {
|
||||
self.focus_handle.clone()
|
||||
@@ -6689,146 +6660,4 @@ pub(crate) mod tests {
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_message_editing_insert_selections(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let connection = StubAgentConnection::new();
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
}]);
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
|
||||
add_to_workspace(thread_view.clone(), cx);
|
||||
|
||||
let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone());
|
||||
message_editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text("Original message to edit", window, cx)
|
||||
});
|
||||
thread_view.update_in(cx, |thread_view, window, cx| thread_view.send(window, cx));
|
||||
cx.run_until_parked();
|
||||
|
||||
let user_message_editor = thread_view.read_with(cx, |thread_view, cx| {
|
||||
thread_view
|
||||
.entry_view_state
|
||||
.read(cx)
|
||||
.entry(0)
|
||||
.expect("Should have at least one entry")
|
||||
.message_editor()
|
||||
.expect("Should have message editor")
|
||||
.clone()
|
||||
});
|
||||
|
||||
cx.focus(&user_message_editor);
|
||||
thread_view.read_with(cx, |thread_view, _cx| {
|
||||
assert_eq!(thread_view.editing_message, Some(0));
|
||||
});
|
||||
|
||||
// Ensure to edit the focused message before proceeding otherwise, since
|
||||
// its content is not different from what was sent, focus will be lost.
|
||||
user_message_editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text("Original message to edit with ", window, cx)
|
||||
});
|
||||
|
||||
// Create a simple buffer with some text so we can create a selection
|
||||
// that will then be added to the message being edited.
|
||||
let (workspace, project) = thread_view.read_with(cx, |thread_view, _cx| {
|
||||
(thread_view.workspace.clone(), thread_view.project.clone())
|
||||
});
|
||||
let buffer = project.update(cx, |project, cx| {
|
||||
project.create_local_buffer("let a = 10 + 10;", None, false, cx)
|
||||
});
|
||||
|
||||
workspace
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
let editor = cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_buffer(buffer.clone(), Some(project.clone()), window, cx);
|
||||
|
||||
editor.change_selections(Default::default(), window, cx, |selections| {
|
||||
selections.select_ranges([8..15]);
|
||||
});
|
||||
|
||||
editor
|
||||
});
|
||||
workspace.add_item_to_active_pane(Box::new(editor), None, false, window, cx);
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
thread_view.update_in(cx, |thread_view, window, cx| {
|
||||
assert_eq!(thread_view.editing_message, Some(0));
|
||||
thread_view.insert_selections(window, cx);
|
||||
});
|
||||
|
||||
user_message_editor.read_with(cx, |editor, cx| {
|
||||
let text = editor.editor().read(cx).text(cx);
|
||||
let expected_text = String::from("Original message to edit with selection ");
|
||||
|
||||
assert_eq!(text, expected_text);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_insert_selections(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let connection = StubAgentConnection::new();
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
}]);
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
|
||||
add_to_workspace(thread_view.clone(), cx);
|
||||
|
||||
let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone());
|
||||
message_editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text("Can you review this snippet ", window, cx)
|
||||
});
|
||||
|
||||
// Create a simple buffer with some text so we can create a selection
|
||||
// that will then be added to the message being edited.
|
||||
let (workspace, project) = thread_view.read_with(cx, |thread_view, _cx| {
|
||||
(thread_view.workspace.clone(), thread_view.project.clone())
|
||||
});
|
||||
let buffer = project.update(cx, |project, cx| {
|
||||
project.create_local_buffer("let a = 10 + 10;", None, false, cx)
|
||||
});
|
||||
|
||||
workspace
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
let editor = cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_buffer(buffer.clone(), Some(project.clone()), window, cx);
|
||||
|
||||
editor.change_selections(Default::default(), window, cx, |selections| {
|
||||
selections.select_ranges([8..15]);
|
||||
});
|
||||
|
||||
editor
|
||||
});
|
||||
workspace.add_item_to_active_pane(Box::new(editor), None, false, window, cx);
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
thread_view.update_in(cx, |thread_view, window, cx| {
|
||||
assert_eq!(thread_view.editing_message, None);
|
||||
thread_view.insert_selections(window, cx);
|
||||
});
|
||||
|
||||
thread_view.read_with(cx, |thread_view, cx| {
|
||||
let text = thread_view.message_editor.read(cx).text(cx);
|
||||
let expected_txt = String::from("Can you review this snippet selection ");
|
||||
|
||||
assert_eq!(text, expected_txt);
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ mod tool_picker;
|
||||
|
||||
use std::{ops::Range, sync::Arc};
|
||||
|
||||
use agent_settings::AgentSettings;
|
||||
use anyhow::Result;
|
||||
use assistant_tool::{ToolSource, ToolWorkingSet};
|
||||
use cloud_llm_client::{Plan, PlanV1, PlanV2};
|
||||
@@ -14,6 +15,7 @@ use context_server::ContextServerId;
|
||||
use editor::{Editor, SelectionEffects, scroll::Autoscroll};
|
||||
use extension::ExtensionManifest;
|
||||
use extension_host::ExtensionStore;
|
||||
use feature_flags::{CodexAcpFeatureFlag, FeatureFlagAppExt as _};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
Action, AnyView, App, AsyncWindowContext, Corner, Entity, EventEmitter, FocusHandle, Focusable,
|
||||
@@ -28,10 +30,10 @@ use project::{
|
||||
agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
|
||||
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
|
||||
};
|
||||
use settings::{SettingsStore, update_settings_file};
|
||||
use settings::{Settings, SettingsStore, update_settings_file};
|
||||
use ui::{
|
||||
Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex,
|
||||
Indicator, PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*,
|
||||
Indicator, PopoverMenu, Switch, SwitchColor, SwitchField, Tooltip, WithScrollbar, prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{Workspace, create_and_open_local_file};
|
||||
@@ -401,6 +403,101 @@ impl AgentConfiguration {
|
||||
)
|
||||
}
|
||||
|
||||
    fn render_command_permission(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
        let always_allow_tool_actions = AgentSettings::get_global(cx).always_allow_tool_actions;
        let fs = self.fs.clone();

        SwitchField::new(
            "always-allow-tool-actions-switch",
            "Allow running commands without asking for confirmation",
            Some(
                "The agent can perform potentially destructive actions without asking for your confirmation.".into(),
            ),
            always_allow_tool_actions,
            move |state, _window, cx| {
                let allow = state == &ToggleState::Selected;
                update_settings_file(fs.clone(), cx, move |settings, _| {
                    settings.agent.get_or_insert_default().set_always_allow_tool_actions(allow);
                });
            },
        )
    }
|
||||
|
||||
fn render_single_file_review(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let single_file_review = AgentSettings::get_global(cx).single_file_review;
|
||||
let fs = self.fs.clone();
|
||||
|
||||
SwitchField::new(
|
||||
"single-file-review",
|
||||
"Enable single-file agent reviews",
|
||||
Some("Agent edits are also displayed in single-file editors for review.".into()),
|
||||
single_file_review,
|
||||
move |state, _window, cx| {
|
||||
let allow = state == &ToggleState::Selected;
|
||||
update_settings_file(fs.clone(), cx, move |settings, _| {
|
||||
settings
|
||||
.agent
|
||||
.get_or_insert_default()
|
||||
.set_single_file_review(allow);
|
||||
});
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn render_sound_notification(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let play_sound_when_agent_done = AgentSettings::get_global(cx).play_sound_when_agent_done;
|
||||
let fs = self.fs.clone();
|
||||
|
||||
SwitchField::new(
|
||||
"sound-notification",
|
||||
"Play sound when finished generating",
|
||||
Some(
|
||||
"Hear a notification sound when the agent is done generating changes or needs your input.".into(),
|
||||
),
|
||||
play_sound_when_agent_done,
|
||||
move |state, _window, cx| {
|
||||
let allow = state == &ToggleState::Selected;
|
||||
update_settings_file(fs.clone(), cx, move |settings, _| {
|
||||
settings.agent.get_or_insert_default().set_play_sound_when_agent_done(allow);
|
||||
});
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn render_modifier_to_send(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let use_modifier_to_send = AgentSettings::get_global(cx).use_modifier_to_send;
|
||||
let fs = self.fs.clone();
|
||||
|
||||
SwitchField::new(
|
||||
"modifier-send",
|
||||
"Use modifier to submit a message",
|
||||
Some(
|
||||
"Make a modifier (cmd-enter on macOS, ctrl-enter on Linux or Windows) required to send messages.".into(),
|
||||
),
|
||||
use_modifier_to_send,
|
||||
move |state, _window, cx| {
|
||||
let allow = state == &ToggleState::Selected;
|
||||
update_settings_file(fs.clone(), cx, move |settings, _| {
|
||||
settings.agent.get_or_insert_default().set_use_modifier_to_send(allow);
|
||||
});
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn render_general_settings_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
v_flex()
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.gap_2p5()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(Headline::new("General Settings"))
|
||||
.child(self.render_command_permission(cx))
|
||||
.child(self.render_single_file_review(cx))
|
||||
.child(self.render_sound_notification(cx))
|
||||
.child(self.render_modifier_to_send(cx))
|
||||
}
|
||||
|
||||
fn render_zed_plan_info(&self, plan: Option<Plan>, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
if let Some(plan) = plan {
|
||||
let free_chip_bg = cx
|
||||
@@ -988,11 +1085,14 @@ impl AgentConfiguration {
|
||||
"Claude Code",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiOpenAi,
|
||||
"Codex",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.when(cx.has_flag::<CodexAcpFeatureFlag>(), |this| {
|
||||
this
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiOpenAi,
|
||||
"Codex",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
})
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiGemini,
|
||||
"Gemini CLI",
|
||||
@@ -1045,6 +1145,7 @@ impl Render for AgentConfiguration {
|
||||
.track_scroll(&self.scroll_handle)
|
||||
.size_full()
|
||||
.overflow_y_scroll()
|
||||
.child(self.render_general_settings_section(cx))
|
||||
.child(self.render_agent_servers_section(cx))
|
||||
.child(self.render_context_servers_section(window, cx))
|
||||
.child(self.render_provider_configuration_section(cx)),
|
||||
|
||||
@@ -619,10 +619,10 @@ mod tests {
|
||||
cx.update(|_window, cx| {
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry.register_provider(
|
||||
Arc::new(FakeLanguageModelProvider::new(
|
||||
FakeLanguageModelProvider::new(
|
||||
LanguageModelProviderId::new("someprovider"),
|
||||
LanguageModelProviderName::new("Some Provider"),
|
||||
)),
|
||||
),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -48,12 +48,12 @@ use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
Action, AnyElement, App, AsyncWindowContext, Corner, DismissEvent, Entity, EventEmitter,
|
||||
ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, Subscription, Task, UpdateGlobal,
|
||||
WeakEntity, prelude::*,
|
||||
ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, ReadGlobal as _, Subscription, Task,
|
||||
UpdateGlobal, WeakEntity, prelude::*,
|
||||
};
|
||||
use language::LanguageRegistry;
|
||||
use language_model::{ConfigurationError, LanguageModelRegistry};
|
||||
use project::{Project, ProjectPath, Worktree};
|
||||
use project::{DisableAiSettings, Project, ProjectPath, Worktree};
|
||||
use prompt_store::{PromptBuilder, PromptStore, UserPromptId};
|
||||
use rules_library::{RulesLibrary, open_rules_library};
|
||||
use search::{BufferSearchBar, buffer_search};
|
||||
@@ -75,6 +75,7 @@ use zed_actions::{
|
||||
assistant::{OpenRulesLibrary, ToggleFocus},
|
||||
};
|
||||
|
||||
use feature_flags::{CodexAcpFeatureFlag, FeatureFlagAppExt as _};
|
||||
const AGENT_PANEL_KEY: &str = "agent_panel";
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
@@ -519,6 +520,13 @@ impl AgentPanel {
|
||||
)
|
||||
});
|
||||
|
||||
if SettingsStore::global(cx)
|
||||
.get::<DisableAiSettings>(None)
|
||||
.disable_ai
|
||||
{
|
||||
return panel;
|
||||
}
|
||||
|
||||
panel.as_mut(cx).loading = true;
|
||||
if let Some(serialized_panel) = serialized_panel {
|
||||
panel.update(cx, |panel, cx| {
|
||||
@@ -670,6 +678,43 @@ impl AgentPanel {
|
||||
)
|
||||
});
|
||||
|
||||
let mut old_disable_ai = false;
|
||||
cx.observe_global_in::<SettingsStore>(window, move |panel, window, cx| {
|
||||
let disable_ai = DisableAiSettings::get_global(cx).disable_ai;
|
||||
if old_disable_ai != disable_ai {
|
||||
let agent_panel_id = cx.entity_id();
|
||||
let agent_panel_visible = panel
|
||||
.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
let agent_dock_position = panel.position(window, cx);
|
||||
let agent_dock = workspace.dock_at_position(agent_dock_position);
|
||||
let agent_panel_focused = agent_dock
|
||||
.read(cx)
|
||||
.active_panel()
|
||||
.is_some_and(|panel| panel.panel_id() == agent_panel_id);
|
||||
|
||||
let active_panel_visible = agent_dock
|
||||
.read(cx)
|
||||
.visible_panel()
|
||||
.is_some_and(|panel| panel.panel_id() == agent_panel_id);
|
||||
|
||||
if agent_panel_focused {
|
||||
cx.dispatch_action(&ToggleFocus);
|
||||
}
|
||||
|
||||
active_panel_visible
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
if agent_panel_visible {
|
||||
cx.emit(PanelEvent::Close);
|
||||
}
|
||||
|
||||
old_disable_ai = disable_ai;
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
Self {
|
||||
active_view,
|
||||
workspace,
|
||||
@@ -1938,32 +1983,34 @@ impl AgentPanel {
|
||||
}
|
||||
}),
|
||||
)
|
||||
.item(
|
||||
ContextMenuEntry::new("New Codex Thread")
|
||||
.icon(IconName::AiOpenAi)
|
||||
.disabled(is_via_collab)
|
||||
.icon_color(Color::Muted)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(panel) =
|
||||
workspace.panel::<AgentPanel>(cx)
|
||||
{
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Codex,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
.when(cx.has_flag::<CodexAcpFeatureFlag>(), |this| {
|
||||
this.item(
|
||||
ContextMenuEntry::new("New Codex Thread")
|
||||
.icon(IconName::AiOpenAi)
|
||||
.disabled(is_via_collab)
|
||||
.icon_color(Color::Muted)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(panel) =
|
||||
workspace.panel::<AgentPanel>(cx)
|
||||
{
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Codex,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
}),
|
||||
)
|
||||
})
|
||||
.item(
|
||||
ContextMenuEntry::new("New Gemini CLI Thread")
|
||||
.icon(IconName::AiGemini)
|
||||
|
||||
@@ -136,7 +136,6 @@ impl Tool for TerminalTool {
|
||||
}),
|
||||
None => Task::ready(None).shared(),
|
||||
};
|
||||
let is_windows = project.read(cx).path_style(cx).is_windows();
|
||||
let shell = project
|
||||
.update(cx, |project, cx| {
|
||||
project
|
||||
@@ -156,7 +155,7 @@ impl Tool for TerminalTool {
|
||||
let build_cmd = {
|
||||
let input_command = input.command.clone();
|
||||
move || {
|
||||
ShellBuilder::new(&Shell::Program(shell), is_windows)
|
||||
ShellBuilder::new(&Shell::Program(shell))
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(input_command), &[])
|
||||
}
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use collections::HashMap;
|
||||
use gpui::{App, BackgroundExecutor, BorrowAppContext, Global};
|
||||
use log::info;
|
||||
|
||||
#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
|
||||
mod non_windows_and_freebsd_deps {
|
||||
pub(super) use gpui::AsyncApp;
|
||||
pub(super) use libwebrtc::native::apm;
|
||||
pub(super) use log::info;
|
||||
pub(super) use parking_lot::Mutex;
|
||||
pub(super) use rodio::cpal::Sample;
|
||||
pub(super) use rodio::source::LimitSettings;
|
||||
|
||||
@@ -649,7 +649,7 @@ impl AutoUpdater {
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
anyhow::ensure!(
|
||||
which::which("rsync").is_ok(),
|
||||
"Could not auto-update because the required rsync utility was not found."
|
||||
"Aborting. Could not find rsync which is required for auto-updates."
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
@@ -658,7 +658,7 @@ impl AutoUpdater {
|
||||
let filename = match OS {
|
||||
"macos" => anyhow::Ok("Zed.dmg"),
|
||||
"linux" => Ok("zed.tar.gz"),
|
||||
"windows" => Ok("Zed.exe"),
|
||||
"windows" => Ok("zed_editor_installer.exe"),
|
||||
unsupported_os => anyhow::bail!("not supported: {unsupported_os}"),
|
||||
}?;
|
||||
|
||||
|
||||
@@ -127,6 +127,7 @@ pub struct DeclarationScoreComponents {
|
||||
pub declaration_count: usize,
|
||||
pub reference_line_distance: u32,
|
||||
pub declaration_line_distance: u32,
|
||||
pub declaration_line_distance_rank: usize,
|
||||
pub excerpt_vs_item_jaccard: f32,
|
||||
pub excerpt_vs_signature_jaccard: f32,
|
||||
pub adjacent_vs_item_jaccard: f32,
|
||||
@@ -135,15 +136,6 @@ pub struct DeclarationScoreComponents {
|
||||
pub excerpt_vs_signature_weighted_overlap: f32,
|
||||
pub adjacent_vs_item_weighted_overlap: f32,
|
||||
pub adjacent_vs_signature_weighted_overlap: f32,
|
||||
pub path_import_match_count: usize,
|
||||
pub wildcard_path_import_match_count: usize,
|
||||
pub import_similarity: f32,
|
||||
pub max_import_similarity: f32,
|
||||
pub normalized_import_similarity: f32,
|
||||
pub wildcard_import_similarity: f32,
|
||||
pub normalized_wildcard_import_similarity: f32,
|
||||
pub included_by_others: usize,
|
||||
pub includes_others: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
[package]
|
||||
name = "codestral"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lib]
|
||||
path = "src/codestral.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
edit_prediction.workspace = true
|
||||
edit_prediction_context.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
http_client.workspace = true
|
||||
language.workspace = true
|
||||
language_models.workspace = true
|
||||
log.workspace = true
|
||||
mistral.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
smol.workspace = true
|
||||
text.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
@@ -1 +0,0 @@
|
||||
../../LICENSE-GPL
|
||||
@@ -1,381 +0,0 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use edit_prediction::{Direction, EditPrediction, EditPredictionProvider};
|
||||
use edit_prediction_context::{EditPredictionExcerpt, EditPredictionExcerptOptions};
|
||||
use futures::AsyncReadExt;
|
||||
use gpui::{App, Context, Entity, Task};
|
||||
use http_client::HttpClient;
|
||||
use language::{
|
||||
language_settings::all_language_settings, Anchor, Buffer, BufferSnapshot, EditPreview, ToPoint,
|
||||
};
|
||||
use language_models::MistralLanguageModelProvider;
|
||||
use mistral::CODESTRAL_API_URL;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
ops::Range,
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use text::ToOffset;
|
||||
|
||||
pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(150);

const EXCERPT_OPTIONS: EditPredictionExcerptOptions = EditPredictionExcerptOptions {
    max_bytes: 1050,
    min_bytes: 525,
    target_before_cursor_over_total_bytes: 0.66,
};
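
As a side note (not part of the original source), the ratio above means that roughly two thirds of a full excerpt's byte budget is reserved for text before the cursor. Below is a minimal sketch of that arithmetic using the constants from EXCERPT_OPTIONS; the actual selection is done by EditPredictionExcerpt::select_from_buffer:

    // Illustrative only: what the EXCERPT_OPTIONS ratio implies for a full-size excerpt.
    fn main() {
        let max_bytes = 1050.0_f64;
        let target_before_cursor_over_total_bytes = 0.66_f64;

        let before_cursor = max_bytes * target_before_cursor_over_total_bytes;
        let after_cursor = max_bytes - before_cursor;

        // Roughly 693 bytes of context before the cursor and 357 bytes after it.
        println!("before: {before_cursor:.0} bytes, after: {after_cursor:.0} bytes");
    }
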
/// Represents a completion that has been received and processed from Codestral.
/// This struct maintains the state needed to interpolate the completion as the user types.
#[derive(Clone)]
struct CurrentCompletion {
    /// The buffer snapshot at the time the completion was generated.
    /// Used to detect changes and interpolate edits.
    snapshot: BufferSnapshot,
    /// The edits that should be applied to transform the original text into the predicted text.
    /// Each edit is a range in the buffer and the text to replace it with.
    edits: Arc<[(Range<Anchor>, String)]>,
    /// Preview of how the buffer will look after applying the edits.
    edit_preview: EditPreview,
}

impl CurrentCompletion {
    /// Attempts to adjust the edits based on changes made to the buffer since the completion was generated.
    /// Returns None if the user's edits conflict with the predicted edits.
    fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, String)>> {
        edit_prediction::interpolate_edits(&self.snapshot, new_snapshot, &self.edits)
    }
}
|
||||
|
||||
pub struct CodestralCompletionProvider {
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
pending_request: Option<Task<Result<()>>>,
|
||||
current_completion: Option<CurrentCompletion>,
|
||||
}
|
||||
|
||||
impl CodestralCompletionProvider {
|
||||
pub fn new(http_client: Arc<dyn HttpClient>) -> Self {
|
||||
Self {
|
||||
http_client,
|
||||
pending_request: None,
|
||||
current_completion: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_api_key(cx: &App) -> bool {
|
||||
Self::api_key(cx).is_some()
|
||||
}
|
||||
|
||||
fn api_key(cx: &App) -> Option<Arc<str>> {
|
||||
MistralLanguageModelProvider::try_global(cx)
|
||||
.and_then(|provider| provider.codestral_api_key(CODESTRAL_API_URL, cx))
|
||||
}
|
||||
|
||||
/// Uses Codestral's Fill-in-the-Middle API for code completion.
|
||||
async fn fetch_completion(
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
api_key: &str,
|
||||
prompt: String,
|
||||
suffix: String,
|
||||
model: String,
|
||||
max_tokens: Option<u32>,
|
||||
) -> Result<String> {
|
||||
let start_time = Instant::now();
|
||||
|
||||
log::debug!(
|
||||
"Codestral: Requesting completion (model: {}, max_tokens: {:?})",
|
||||
model,
|
||||
max_tokens
|
||||
);
|
||||
|
||||
let request = CodestralRequest {
|
||||
model,
|
||||
prompt,
|
||||
suffix: if suffix.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(suffix)
|
||||
},
|
||||
max_tokens: max_tokens.or(Some(350)),
|
||||
temperature: Some(0.2),
|
||||
top_p: Some(1.0),
|
||||
stream: Some(false),
|
||||
stop: None,
|
||||
random_seed: None,
|
||||
min_tokens: None,
|
||||
};
|
||||
|
||||
let request_body = serde_json::to_string(&request)?;
|
||||
|
||||
log::debug!("Codestral: Sending FIM request");
|
||||
|
||||
let http_request = http_client::Request::builder()
|
||||
.method(http_client::Method::POST)
|
||||
.uri(format!("{}/v1/fim/completions", CODESTRAL_API_URL))
|
||||
.header("Content-Type", "application/json")
|
||||
.header("Authorization", format!("Bearer {}", api_key))
|
||||
.body(http_client::AsyncBody::from(request_body))?;
|
||||
|
||||
let mut response = http_client.send(http_request).await?;
|
||||
let status = response.status();
|
||||
|
||||
log::debug!("Codestral: Response status: {}", status);
|
||||
|
||||
if !status.is_success() {
|
||||
let mut body = String::new();
|
||||
response.body_mut().read_to_string(&mut body).await?;
|
||||
return Err(anyhow::anyhow!(
|
||||
"Codestral API error: {} - {}",
|
||||
status,
|
||||
body
|
||||
));
|
||||
}
|
||||
|
||||
let mut body = String::new();
|
||||
response.body_mut().read_to_string(&mut body).await?;
|
||||
|
||||
let codestral_response: CodestralResponse = serde_json::from_str(&body)?;
|
||||
|
||||
let elapsed = start_time.elapsed();
|
||||
|
||||
if let Some(choice) = codestral_response.choices.first() {
|
||||
let completion = &choice.message.content;
|
||||
|
||||
log::debug!(
|
||||
"Codestral: Completion received ({} tokens, {:.2}s)",
|
||||
codestral_response.usage.completion_tokens,
|
||||
elapsed.as_secs_f64()
|
||||
);
|
||||
|
||||
// Return just the completion text for insertion at cursor
|
||||
Ok(completion.clone())
|
||||
} else {
|
||||
log::error!("Codestral: No completion returned in response");
|
||||
Err(anyhow::anyhow!("No completion returned from Codestral"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl EditPredictionProvider for CodestralCompletionProvider {
|
||||
fn name() -> &'static str {
|
||||
"codestral"
|
||||
}
|
||||
|
||||
fn display_name() -> &'static str {
|
||||
"Codestral"
|
||||
}
|
||||
|
||||
fn show_completions_in_menu() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn is_enabled(&self, _buffer: &Entity<Buffer>, _cursor_position: Anchor, cx: &App) -> bool {
|
||||
Self::api_key(cx).is_some()
|
||||
}
|
||||
|
||||
fn is_refreshing(&self) -> bool {
|
||||
self.pending_request.is_some()
|
||||
}
|
||||
|
||||
fn refresh(
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
cursor_position: language::Anchor,
|
||||
debounce: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
log::debug!("Codestral: Refresh called (debounce: {})", debounce);
|
||||
|
||||
let Some(api_key) = Self::api_key(cx) else {
|
||||
log::warn!("Codestral: No API key configured, skipping refresh");
|
||||
return;
|
||||
};
|
||||
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
|
||||
// Check if current completion is still valid
|
||||
if let Some(current_completion) = self.current_completion.as_ref() {
|
||||
if current_completion.interpolate(&snapshot).is_some() {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
let http_client = self.http_client.clone();
|
||||
|
||||
// Get settings
|
||||
let settings = all_language_settings(None, cx);
|
||||
let model = settings
|
||||
.edit_predictions
|
||||
.codestral
|
||||
.model
|
||||
.clone()
|
||||
.unwrap_or_else(|| "codestral-latest".to_string());
|
||||
let max_tokens = settings.edit_predictions.codestral.max_tokens;
|
||||
|
||||
self.pending_request = Some(cx.spawn(async move |this, cx| {
|
||||
if debounce {
|
||||
log::debug!("Codestral: Debouncing for {:?}", DEBOUNCE_TIMEOUT);
|
||||
smol::Timer::after(DEBOUNCE_TIMEOUT).await;
|
||||
}
|
||||
|
||||
let cursor_offset = cursor_position.to_offset(&snapshot);
|
||||
let cursor_point = cursor_offset.to_point(&snapshot);
|
||||
let excerpt = EditPredictionExcerpt::select_from_buffer(
|
||||
cursor_point,
|
||||
&snapshot,
|
||||
&EXCERPT_OPTIONS,
|
||||
None,
|
||||
)
|
||||
.context("Line containing cursor doesn't fit in excerpt max bytes")?;
|
||||
|
||||
let excerpt_text = excerpt.text(&snapshot);
|
||||
let cursor_within_excerpt = cursor_offset
|
||||
.saturating_sub(excerpt.range.start)
|
||||
.min(excerpt_text.body.len());
|
||||
let prompt = excerpt_text.body[..cursor_within_excerpt].to_string();
|
||||
let suffix = excerpt_text.body[cursor_within_excerpt..].to_string();
|
||||
|
||||
let completion_text = match Self::fetch_completion(
|
||||
http_client,
|
||||
&api_key,
|
||||
prompt,
|
||||
suffix,
|
||||
model,
|
||||
max_tokens,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(completion) => completion,
|
||||
Err(e) => {
|
||||
log::error!("Codestral: Failed to fetch completion: {}", e);
|
||||
this.update(cx, |this, cx| {
|
||||
this.pending_request = None;
|
||||
cx.notify();
|
||||
})?;
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
if completion_text.trim().is_empty() {
|
||||
log::debug!("Codestral: Completion was empty after trimming; ignoring");
|
||||
this.update(cx, |this, cx| {
|
||||
this.pending_request = None;
|
||||
cx.notify();
|
||||
})?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let edits: Arc<[(Range<Anchor>, String)]> =
|
||||
vec![(cursor_position..cursor_position, completion_text)].into();
|
||||
let edit_preview = buffer
|
||||
.read_with(cx, |buffer, cx| buffer.preview_edits(edits.clone(), cx))?
|
||||
.await;
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
this.current_completion = Some(CurrentCompletion {
|
||||
snapshot,
|
||||
edits,
|
||||
edit_preview,
|
||||
});
|
||||
this.pending_request = None;
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}));
|
||||
}
|
||||
|
||||
fn cycle(
|
||||
&mut self,
|
||||
_buffer: Entity<Buffer>,
|
||||
_cursor_position: Anchor,
|
||||
_direction: Direction,
|
||||
_cx: &mut Context<Self>,
|
||||
) {
|
||||
// Codestral doesn't support multiple completions, so cycling does nothing
|
||||
}
|
||||
|
||||
fn accept(&mut self, _cx: &mut Context<Self>) {
|
||||
log::debug!("Codestral: Completion accepted");
|
||||
self.pending_request = None;
|
||||
self.current_completion = None;
|
||||
}
|
||||
|
||||
fn discard(&mut self, _cx: &mut Context<Self>) {
|
||||
log::debug!("Codestral: Completion discarded");
|
||||
self.pending_request = None;
|
||||
self.current_completion = None;
|
||||
}
|
||||
|
||||
    /// Returns the completion suggestion, adjusted or invalidated based on user edits
    fn suggest(
        &mut self,
        buffer: &Entity<Buffer>,
        _cursor_position: Anchor,
        cx: &mut Context<Self>,
    ) -> Option<EditPrediction> {
        let current_completion = self.current_completion.as_ref()?;
        let buffer = buffer.read(cx);
        let edits = current_completion.interpolate(&buffer.snapshot())?;
        if edits.is_empty() {
            return None;
        }
        Some(EditPrediction::Local {
            id: None,
            edits,
            edit_preview: Some(current_completion.edit_preview.clone()),
        })
    }
}
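
For readers unfamiliar with fill-in-the-middle prompting, here is a standalone sketch (illustrative, not taken from this crate) of the prompt/suffix split that refresh performs around the cursor; the real code slices the bytes of a buffer excerpt at cursor_within_excerpt rather than a plain string:

    // Hypothetical helper mirroring how refresh() slices excerpt_text.body around the cursor.
    // Assumes `cursor` lands on a char boundary (the example input is plain ASCII).
    fn split_for_fim(text: &str, cursor: usize) -> (String, String) {
        let cursor = cursor.min(text.len());
        (text[..cursor].to_string(), text[cursor..].to_string())
    }

    fn main() {
        let source = "fn add(a: i32, b: i32) -> i32 { a +  }";
        let cursor = source.find('+').map(|i| i + 2).unwrap_or(source.len());
        let (prompt, suffix) = split_for_fim(source, cursor);
        // The prompt ends at the cursor; the suffix is everything after it.
        println!("prompt: {prompt:?}");
        println!("suffix: {suffix:?}");
    }
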
#[derive(Debug, Serialize, Deserialize)]
pub struct CodestralRequest {
    pub model: String,
    pub prompt: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub suffix: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stop: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub random_seed: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub min_tokens: Option<u32>,
}

#[derive(Debug, Deserialize)]
pub struct CodestralResponse {
    pub id: String,
    pub object: String,
    pub model: String,
    pub usage: Usage,
    pub created: u64,
    pub choices: Vec<Choice>,
}

#[derive(Debug, Deserialize)]
pub struct Usage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}

#[derive(Debug, Deserialize)]
pub struct Choice {
    pub index: u32,
    pub message: Message,
    pub finish_reason: String,
}

#[derive(Debug, Deserialize)]
pub struct Message {
    pub content: String,
    pub role: String,
}
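
For illustration only (assuming the serde_json dependency listed in the removed Cargo.toml above), the defaults chosen by fetch_completion produce a request body along these lines; because of the skip_serializing_if attributes, fields left as None never appear in the JSON at all:

    // Illustrative only: the shape of the FIM request body that fetch_completion's
    // defaults produce. It is POSTed to {CODESTRAL_API_URL}/v1/fim/completions with
    // an `Authorization: Bearer <api key>` header.
    use serde_json::json;

    fn main() {
        let body = json!({
            "model": "codestral-latest",
            "prompt": "fn add(a: i32, b: i32) -> i32 { a + ",
            "suffix": " }",
            "max_tokens": 350,
            "temperature": 0.2,
            "top_p": 1.0,
            "stream": false,
        });
        println!("{}", serde_json::to_string_pretty(&body).unwrap());
    }
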
@@ -97,7 +97,6 @@ CREATE TABLE "worktree_entries" (
|
||||
"is_external" BOOL NOT NULL,
|
||||
"is_ignored" BOOL NOT NULL,
|
||||
"is_deleted" BOOL NOT NULL,
|
||||
"is_hidden" BOOL NOT NULL,
|
||||
"git_status" INTEGER,
|
||||
"is_fifo" BOOL NOT NULL,
|
||||
PRIMARY KEY (project_id, worktree_id, id),
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
ALTER TABLE "worktree_entries"
|
||||
ADD "is_hidden" BOOL NOT NULL DEFAULT FALSE;
|
||||
@@ -282,7 +282,6 @@ impl Database {
|
||||
git_status: ActiveValue::set(None),
|
||||
is_external: ActiveValue::set(entry.is_external),
|
||||
is_deleted: ActiveValue::set(false),
|
||||
is_hidden: ActiveValue::set(entry.is_hidden),
|
||||
scan_id: ActiveValue::set(update.scan_id as i64),
|
||||
is_fifo: ActiveValue::set(entry.is_fifo),
|
||||
}
|
||||
@@ -301,7 +300,6 @@ impl Database {
|
||||
worktree_entry::Column::MtimeNanos,
|
||||
worktree_entry::Column::CanonicalPath,
|
||||
worktree_entry::Column::IsIgnored,
|
||||
worktree_entry::Column::IsHidden,
|
||||
worktree_entry::Column::ScanId,
|
||||
])
|
||||
.to_owned(),
|
||||
@@ -907,7 +905,6 @@ impl Database {
|
||||
canonical_path: db_entry.canonical_path,
|
||||
is_ignored: db_entry.is_ignored,
|
||||
is_external: db_entry.is_external,
|
||||
is_hidden: db_entry.is_hidden,
|
||||
// This is only used in the summarization backlog, so if it's None,
|
||||
// that just means we won't be able to detect when to resummarize
|
||||
// based on total number of backlogged bytes - instead, we'd go
|
||||
|
||||
@@ -671,7 +671,6 @@ impl Database {
|
||||
canonical_path: db_entry.canonical_path,
|
||||
is_ignored: db_entry.is_ignored,
|
||||
is_external: db_entry.is_external,
|
||||
is_hidden: db_entry.is_hidden,
|
||||
// This is only used in the summarization backlog, so if it's None,
|
||||
// that just means we won't be able to detect when to resummarize
|
||||
// based on total number of backlogged bytes - instead, we'd go
|
||||
|
||||
@@ -19,7 +19,6 @@ pub struct Model {
|
||||
pub is_ignored: bool,
|
||||
pub is_external: bool,
|
||||
pub is_deleted: bool,
|
||||
pub is_hidden: bool,
|
||||
pub scan_id: i64,
|
||||
pub is_fifo: bool,
|
||||
pub canonical_path: Option<String>,
|
||||
|
||||
@@ -4,7 +4,7 @@ use crate::{
|
||||
};
|
||||
use call::ActiveCall;
|
||||
use editor::{
|
||||
DocumentColorsRenderMode, Editor, FETCH_COLORS_DEBOUNCE_TIMEOUT, RowInfo, SelectionEffects,
|
||||
DocumentColorsRenderMode, Editor, RowInfo, SelectionEffects,
|
||||
actions::{
|
||||
ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst,
|
||||
ExpandMacroRecursively, MoveToEnd, Redo, Rename, SelectAll, ToggleCodeActions, Undo,
|
||||
@@ -1272,7 +1272,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
fake_language_server.start_progress("the-token").await;
|
||||
|
||||
executor.advance_clock(SERVER_PROGRESS_THROTTLE_TIMEOUT);
|
||||
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
token: lsp::NumberOrString::String("the-token".to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Report(
|
||||
lsp::WorkDoneProgressReport {
|
||||
@@ -1306,7 +1306,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
});
|
||||
|
||||
executor.advance_clock(SERVER_PROGRESS_THROTTLE_TIMEOUT);
|
||||
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
token: lsp::NumberOrString::String("the-token".to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Report(
|
||||
lsp::WorkDoneProgressReport {
|
||||
@@ -2409,7 +2409,6 @@ async fn test_lsp_document_color(cx_a: &mut TestAppContext, cx_b: &mut TestAppCo
|
||||
.unwrap();
|
||||
|
||||
color_request_handle.next().await.unwrap();
|
||||
executor.advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT);
|
||||
executor.run_until_parked();
|
||||
|
||||
assert_eq!(
|
||||
@@ -2849,7 +2848,7 @@ async fn test_lsp_pull_diagnostics(
|
||||
});
|
||||
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
lsp::PublishDiagnosticsParams {
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
range: lsp::Range {
|
||||
@@ -2870,7 +2869,7 @@ async fn test_lsp_pull_diagnostics(
|
||||
},
|
||||
);
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
lsp::PublishDiagnosticsParams {
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(),
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
range: lsp::Range {
|
||||
@@ -2892,7 +2891,7 @@ async fn test_lsp_pull_diagnostics(
|
||||
);
|
||||
|
||||
if should_stream_workspace_diagnostic {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
token: expected_workspace_diagnostic_token.clone(),
|
||||
value: lsp::ProgressParamsValue::WorkspaceDiagnostic(
|
||||
lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport {
|
||||
@@ -3074,7 +3073,7 @@ async fn test_lsp_pull_diagnostics(
|
||||
});
|
||||
|
||||
if should_stream_workspace_diagnostic {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
token: expected_workspace_diagnostic_token.clone(),
|
||||
value: lsp::ProgressParamsValue::WorkspaceDiagnostic(
|
||||
lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport {
|
||||
|
||||
@@ -4077,7 +4077,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
.await;
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
lsp::PublishDiagnosticsParams {
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
@@ -4097,7 +4097,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
.await
|
||||
.unwrap();
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
lsp::PublishDiagnosticsParams {
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
@@ -4171,7 +4171,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
|
||||
// Simulate a language server reporting more errors for a file.
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
lsp::PublishDiagnosticsParams {
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![
|
||||
@@ -4269,7 +4269,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
|
||||
// Simulate a language server reporting no errors for a file.
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
lsp::PublishDiagnosticsParams {
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: Vec::new(),
|
||||
@@ -4365,7 +4365,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
|
||||
.await
|
||||
.into_response()
|
||||
.unwrap();
|
||||
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Begin(
|
||||
lsp::WorkDoneProgressBegin {
|
||||
@@ -4376,7 +4376,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
|
||||
});
|
||||
for file_name in file_names {
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
lsp::PublishDiagnosticsParams {
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(Path::new(path!("/test")).join(file_name)).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
@@ -4389,7 +4389,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
|
||||
},
|
||||
);
|
||||
}
|
||||
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::End(
|
||||
lsp::WorkDoneProgressEnd { message: None },
|
||||
|
||||
@@ -97,10 +97,11 @@ impl CommandPaletteFilter {
|
||||
pub struct CommandInterceptResult {
|
||||
/// The action produced as a result of the interception.
|
||||
pub action: Box<dyn Action>,
|
||||
/// The display string to show in the command palette for this result.
|
||||
// TODO: Document this field.
|
||||
#[allow(missing_docs)]
|
||||
pub string: String,
|
||||
/// The character positions in the string that match the query.
|
||||
/// Used for highlighting matched characters in the command palette UI.
|
||||
// TODO: Document this field.
|
||||
#[allow(missing_docs)]
|
||||
pub positions: Vec<usize>,
|
||||
}
|
||||
|
||||
|
||||
@@ -41,9 +41,12 @@ impl StdioTransport {
command.current_dir(working_directory);
}

let mut server = command
.spawn()
.with_context(|| format!("failed to spawn command {command:?})",))?;
let mut server = command.spawn().with_context(|| {
format!(
"failed to spawn command. (path={:?}, args={:?})",
binary.executable, &binary.args
)
})?;

let stdin = server.stdin.take().unwrap();
let stdout = server.stdout.take().unwrap();
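
The new error message above folds the adapter's executable and arguments into the spawn failure. A minimal standalone sketch of the same anyhow pattern, using a deliberately fictional binary name so the added context is visible in the error chain:

    // Minimal sketch of attaching context to a spawn failure with anyhow.
    use anyhow::{Context as _, Result};
    use std::process::{Child, Command, Stdio};

    fn spawn_with_context(program: &str, args: &[&str]) -> Result<Child> {
        Command::new(program)
            .args(args)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .spawn()
            .with_context(|| format!("failed to spawn command. (path={program:?}, args={args:?})"))
    }

    fn main() {
        if let Err(error) = spawn_with_context("definitely-not-a-real-binary", &["--version"]) {
            // Prints the io::Error plus the context string added above.
            eprintln!("{error:#}");
        }
    }
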
@@ -270,7 +270,7 @@ impl RegisteredBuffer {
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidChangeTextDocument>(
|
||||
lsp::DidChangeTextDocumentParams {
|
||||
&lsp::DidChangeTextDocumentParams {
|
||||
text_document: lsp::VersionedTextDocumentIdentifier::new(
|
||||
buffer.uri.clone(),
|
||||
buffer.snapshot_version,
|
||||
@@ -744,7 +744,7 @@ impl Copilot {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
server
|
||||
.notify::<lsp::notification::DidOpenTextDocument>(
|
||||
lsp::DidOpenTextDocumentParams {
|
||||
&lsp::DidOpenTextDocumentParams {
|
||||
text_document: lsp::TextDocumentItem {
|
||||
uri: uri.clone(),
|
||||
language_id: language_id.clone(),
|
||||
@@ -792,14 +792,13 @@ impl Copilot {
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidSaveTextDocument>(
|
||||
lsp::DidSaveTextDocumentParams {
|
||||
&lsp::DidSaveTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(
|
||||
registered_buffer.uri.clone(),
|
||||
),
|
||||
text: None,
|
||||
},
|
||||
)
|
||||
.ok();
|
||||
)?;
|
||||
}
|
||||
language::BufferEvent::FileHandleChanged
|
||||
| language::BufferEvent::LanguageChanged => {
|
||||
@@ -815,15 +814,14 @@ impl Copilot {
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidCloseTextDocument>(
|
||||
lsp::DidCloseTextDocumentParams {
|
||||
&lsp::DidCloseTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(old_uri),
|
||||
},
|
||||
)
|
||||
.ok();
|
||||
)?;
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidOpenTextDocument>(
|
||||
lsp::DidOpenTextDocumentParams {
|
||||
&lsp::DidOpenTextDocumentParams {
|
||||
text_document: lsp::TextDocumentItem::new(
|
||||
registered_buffer.uri.clone(),
|
||||
registered_buffer.language_id.clone(),
|
||||
@@ -831,8 +829,7 @@ impl Copilot {
|
||||
registered_buffer.snapshot.text(),
|
||||
),
|
||||
},
|
||||
)
|
||||
.ok();
|
||||
)?;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@@ -849,7 +846,7 @@ impl Copilot {
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidCloseTextDocument>(
|
||||
lsp::DidCloseTextDocumentParams {
|
||||
&lsp::DidCloseTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(buffer.uri),
|
||||
},
|
||||
)
|
||||
@@ -1154,12 +1151,9 @@ fn notify_did_change_config_to_server(
|
||||
}
|
||||
});
|
||||
|
||||
server
|
||||
.notify::<lsp::notification::DidChangeConfiguration>(lsp::DidChangeConfigurationParams {
|
||||
settings,
|
||||
})
|
||||
.ok();
|
||||
Ok(())
|
||||
server.notify::<lsp::notification::DidChangeConfiguration>(&lsp::DidChangeConfigurationParams {
|
||||
settings,
|
||||
})
|
||||
}
|
||||
|
||||
async fn clear_copilot_dir() {
|
||||
|
||||
@@ -46,7 +46,6 @@ pub trait DapDelegate: Send + Sync + 'static {
|
||||
async fn which(&self, command: &OsStr) -> Option<PathBuf>;
|
||||
async fn read_text_file(&self, path: &RelPath) -> Result<String>;
|
||||
async fn shell_env(&self) -> collections::HashMap<String, String>;
|
||||
fn is_headless(&self) -> bool;
|
||||
}
|
||||
|
||||
#[derive(
|
||||
|
||||
@@ -674,7 +674,13 @@ impl StdioTransport {
|
||||
command.args(&binary.arguments);
|
||||
command.envs(&binary.envs);
|
||||
|
||||
let mut process = Child::spawn(command, Stdio::piped())?;
|
||||
let mut process = Child::spawn(command, Stdio::piped()).with_context(|| {
|
||||
format!(
|
||||
"failed to spawn command `{} {}`.",
|
||||
binary_command,
|
||||
binary.arguments.join(" ")
|
||||
)
|
||||
})?;
|
||||
|
||||
let err_task = process.stderr.take().map(|stderr| {
|
||||
cx.background_spawn(TransportDelegate::handle_adapter_log(
|
||||
@@ -1052,13 +1058,11 @@ impl Child {
|
||||
#[cfg(not(windows))]
|
||||
fn spawn(mut command: std::process::Command, stdin: Stdio) -> Result<Self> {
|
||||
util::set_pre_exec_to_start_new_session(&mut command);
|
||||
let mut command = smol::process::Command::from(command);
|
||||
let process = command
|
||||
let process = smol::process::Command::from(command)
|
||||
.stdin(stdin)
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.with_context(|| format!("failed to spawn command `{command:?}`",))?;
|
||||
.spawn()?;
|
||||
Ok(Self { process })
|
||||
}
|
||||
|
||||
@@ -1066,13 +1070,11 @@ impl Child {
|
||||
fn spawn(command: std::process::Command, stdin: Stdio) -> Result<Self> {
|
||||
// TODO(windows): create a job object and add the child process handle to it,
|
||||
// see https://learn.microsoft.com/en-us/windows/win32/procthread/job-objects
|
||||
let mut command = smol::process::Command::from(command);
|
||||
let process = command
|
||||
let process = smol::process::Command::from(command)
|
||||
.stdin(stdin)
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.with_context(|| format!("failed to spawn command `{command:?}`",))?;
|
||||
.spawn()?;
|
||||
Ok(Self { process })
|
||||
}
|
||||
|
||||
|
||||
@@ -120,13 +120,6 @@ impl JsDebugAdapter {
configuration
.entry("sourceMapRenames")
.or_insert(true.into());

// Set up remote browser debugging
if delegate.is_headless() {
configuration
.entry("browserLaunchLocation")
.or_insert("ui".into());
}
}

let adapter_path = if let Some(user_installed_path) = user_installed_path {
@@ -963,21 +963,26 @@ pub fn init(cx: &mut App) {
};

let project = workspace.project();
log_store.update(cx, |store, cx| {
store.add_project(project, cx);
});
if project.read(cx).is_local() {
log_store.update(cx, |store, cx| {
store.add_project(project, cx);
});
}

let log_store = log_store.clone();
workspace.register_action(move |workspace, _: &OpenDebugAdapterLogs, window, cx| {
workspace.add_item_to_active_pane(
Box::new(cx.new(|cx| {
DapLogView::new(workspace.project().clone(), log_store.clone(), window, cx)
})),
None,
true,
window,
cx,
);
let project = workspace.project().read(cx);
if project.is_local() {
workspace.add_item_to_active_pane(
Box::new(cx.new(|cx| {
DapLogView::new(workspace.project().clone(), log_store.clone(), window, cx)
})),
None,
true,
window,
cx,
);
}
});
})
.detach();
@@ -937,7 +937,6 @@ impl RunningState {
let task_store = project.read(cx).task_store().downgrade();
let weak_project = project.downgrade();
let weak_workspace = workspace.downgrade();
let is_windows = project.read(cx).path_style(cx).is_windows();
let remote_shell = project
.read(cx)
.remote_client()
@@ -1030,7 +1029,7 @@ impl RunningState {
task.resolved.shell = Shell::Program(remote_shell);
}

let builder = ShellBuilder::new(&task.resolved.shell, is_windows);
let builder = ShellBuilder::new(&task.resolved.shell);
let command_label = builder.command_label(task.resolved.command.as_deref().unwrap_or(""));
let (command, args) =
builder.build(task.resolved.command.clone(), &task.resolved.args);
@@ -965,11 +965,10 @@ async fn heuristic_syntactic_expand(
let row_count = node_end.row - node_start.row + 1;
let mut ancestor_range = None;
let reached_outline_node = cx.background_executor().scoped({
let node_range = node_range.clone();
let outline_range = outline_range.clone();
let ancestor_range = &mut ancestor_range;
|scope| {
scope.spawn(async move {
let node_range = node_range.clone();
let outline_range = outline_range.clone();
let ancestor_range = &mut ancestor_range;
|scope| {scope.spawn(async move {
// Stop if we've exceeded the row count or reached an outline node. Then, find the interval
// of node children which contains the query range. For example, this allows just returning
// the header of a declaration rather than the entire declaration.
@@ -981,11 +980,8 @@ async fn heuristic_syntactic_expand(
if cursor.goto_first_child() {
loop {
let child_node = cursor.node();
let child_range =
previous_end..Point::from_ts_point(child_node.end_position());
if included_child_start.is_none()
&& child_range.contains(&input_range.start)
{
let child_range = previous_end..Point::from_ts_point(child_node.end_position());
if included_child_start.is_none() && child_range.contains(&input_range.start) {
included_child_start = Some(child_range.start);
}
if child_range.contains(&input_range.end) {
@@ -1001,22 +997,19 @@ async fn heuristic_syntactic_expand(
if let Some(start) = included_child_start {
let row_count = end.row - start.row;
if row_count < max_row_count {
*ancestor_range =
Some(Some(RangeInclusive::new(start.row, end.row)));
*ancestor_range = Some(Some(RangeInclusive::new(start.row, end.row)));
return;
}
}

log::info!(
"Expanding to ancestor started on {} node\
exceeding row limit of {max_row_count}.",
"Expanding to ancestor started on {} node exceeding row limit of {max_row_count}.",
node.grammar_name()
);
*ancestor_range = Some(None);
}
})
}
});
}});
reached_outline_node.await;
if let Some(node) = ancestor_range {
return node;
@@ -20,8 +20,6 @@ util.workspace = true
workspace-hack.workspace = true
zed.workspace = true
zlog.workspace = true
task.workspace = true
theme.workspace = true

[lints]
workspace = true

@@ -53,20 +53,9 @@ fn main() -> Result<()> {

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum PreprocessorError {
ActionNotFound {
action_name: String,
},
DeprecatedActionUsed {
used: String,
should_be: String,
},
ActionNotFound { action_name: String },
DeprecatedActionUsed { used: String, should_be: String },
InvalidFrontmatterLine(String),
InvalidSettingsJson {
file: std::path::PathBuf,
line: usize,
snippet: String,
error: String,
},
}

impl PreprocessorError {
@@ -83,20 +72,6 @@ impl PreprocessorError {
}
PreprocessorError::ActionNotFound { action_name }
}

fn new_for_invalid_settings_json(
chapter: &Chapter,
location: usize,
snippet: String,
error: String,
) -> Self {
PreprocessorError::InvalidSettingsJson {
file: chapter.path.clone().expect("chapter has path"),
line: chapter.content[..location].lines().count() + 1,
snippet,
error,
}
}
}

impl std::fmt::Display for PreprocessorError {
@@ -113,21 +88,6 @@ impl std::fmt::Display for PreprocessorError {
"Deprecated action used: {} should be {}",
used, should_be
),
PreprocessorError::InvalidSettingsJson {
file,
line,
snippet,
error,
} => {
write!(
f,
"Invalid settings JSON at {}:{}\nError: {}\n\n{}",
file.display(),
line,
error,
snippet
)
}
}
}
}
@@ -140,11 +100,11 @@ fn handle_preprocessing() -> Result<()> {
let (_ctx, mut book) = CmdPreprocessor::parse_input(input.as_bytes())?;

let mut errors = HashSet::<PreprocessorError>::new();

handle_frontmatter(&mut book, &mut errors);
template_big_table_of_actions(&mut book);
template_and_validate_keybindings(&mut book, &mut errors);
template_and_validate_actions(&mut book, &mut errors);
template_and_validate_json_snippets(&mut book, &mut errors);

if !errors.is_empty() {
const ANSI_RED: &str = "\x1b[31m";
@@ -275,161 +235,6 @@ fn find_binding(os: &str, action: &str) -> Option<String> {
})
}

fn template_and_validate_json_snippets(book: &mut Book, errors: &mut HashSet<PreprocessorError>) {
fn for_each_labeled_code_block_mut(
book: &mut Book,
errors: &mut HashSet<PreprocessorError>,
f: impl Fn(&str, &str) -> anyhow::Result<()>,
) {
const TAGGED_JSON_BLOCK_START: &'static str = "```json [";
const JSON_BLOCK_END: &'static str = "```";

for_each_chapter_mut(book, |chapter| {
let mut offset = 0;
while let Some(loc) = chapter.content[offset..].find(TAGGED_JSON_BLOCK_START) {
let loc = loc + offset;
let tag_start = loc + TAGGED_JSON_BLOCK_START.len();
offset = tag_start;
let Some(tag_end) = chapter.content[tag_start..].find(']') else {
errors.insert(PreprocessorError::new_for_invalid_settings_json(
chapter,
loc,
chapter.content[loc..tag_start].to_string(),
"Unclosed JSON block tag".to_string(),
));
continue;
};
let tag_end = tag_end + tag_start;

let tag = &chapter.content[tag_start..tag_end];

if tag.contains('\n') {
errors.insert(PreprocessorError::new_for_invalid_settings_json(
chapter,
loc,
chapter.content[loc..tag_start].to_string(),
"Unclosed JSON block tag".to_string(),
));
continue;
}

let snippet_start = tag_end + 1;
offset = snippet_start;

let Some(snippet_end) = chapter.content[snippet_start..].find(JSON_BLOCK_END)
else {
errors.insert(PreprocessorError::new_for_invalid_settings_json(
chapter,
loc,
chapter.content[loc..tag_end + 1].to_string(),
"Missing closing code block".to_string(),
));
continue;
};
let snippet_end = snippet_start + snippet_end;
let snippet_json = &chapter.content[snippet_start..snippet_end];
offset = snippet_end + 3;

if let Err(err) = f(tag, snippet_json) {
errors.insert(PreprocessorError::new_for_invalid_settings_json(
chapter,
loc,
chapter.content[loc..snippet_end + 3].to_string(),
err.to_string(),
));
continue;
};
let tag_range_complete = tag_start - 1..tag_end + 1;
offset -= tag_range_complete.len();
chapter.content.replace_range(tag_range_complete, "");
}
});
}

for_each_labeled_code_block_mut(book, errors, |label, snippet_json| {
let mut snippet_json_fixed = snippet_json
.to_string()
.replace("\n>", "\n")
.trim()
.to_string();
while snippet_json_fixed.starts_with("//") {
if let Some(line_end) = snippet_json_fixed.find('\n') {
snippet_json_fixed.replace_range(0..line_end, "");
snippet_json_fixed = snippet_json_fixed.trim().to_string();
}
}
match label {
"settings" => {
if !snippet_json_fixed.starts_with('{') || !snippet_json_fixed.ends_with('}') {
snippet_json_fixed.insert(0, '{');
snippet_json_fixed.push_str("\n}");
}
settings::parse_json_with_comments::<settings::SettingsContent>(
&snippet_json_fixed,
)?;
}
"keymap" => {
if !snippet_json_fixed.starts_with('[') || !snippet_json_fixed.ends_with(']') {
snippet_json_fixed.insert(0, '[');
snippet_json_fixed.push_str("\n]");
}

let keymap = settings::KeymapFile::parse(&snippet_json_fixed)
.context("Failed to parse keymap JSON")?;
for section in keymap.sections() {
for (keystrokes, action) in section.bindings() {
keystrokes
.split_whitespace()
.map(|source| gpui::Keystroke::parse(source))
.collect::<std::result::Result<Vec<_>, _>>()
.context("Failed to parse keystroke")?;
if let Some((action_name, _)) = settings::KeymapFile::parse_action(action)
.map_err(|err| anyhow::format_err!(err))
.context("Failed to parse action")?
{
anyhow::ensure!(
find_action_by_name(action_name).is_some(),
"Action not found: {}",
action_name
);
}
}
}
}
"debug" => {
if !snippet_json_fixed.starts_with('[') || !snippet_json_fixed.ends_with(']') {
snippet_json_fixed.insert(0, '[');
snippet_json_fixed.push_str("\n]");
}

settings::parse_json_with_comments::<task::DebugTaskFile>(&snippet_json_fixed)?;
}
"tasks" => {
if !snippet_json_fixed.starts_with('[') || !snippet_json_fixed.ends_with(']') {
snippet_json_fixed.insert(0, '[');
snippet_json_fixed.push_str("\n]");
}

settings::parse_json_with_comments::<task::TaskTemplates>(&snippet_json_fixed)?;
}
"icon-theme" => {
if !snippet_json_fixed.starts_with('{') || !snippet_json_fixed.ends_with('}') {
snippet_json_fixed.insert(0, '{');
snippet_json_fixed.push_str("\n}");
}

settings::parse_json_with_comments::<theme::IconThemeFamilyContent>(
&snippet_json_fixed,
)?;
}
label => {
anyhow::bail!("Unexpected JSON code block tag: {}", label)
}
};
Ok(())
});
}

/// Removes any configurable options from the stringified action if existing,
/// ensuring that only the actual action name is returned. If the action consists
/// only of a string and nothing else, the string is returned as-is.
@@ -2,7 +2,7 @@ use std::ops::Range;

use client::EditPredictionUsage;
use gpui::{App, Context, Entity, SharedString};
use language::{Anchor, Buffer, BufferSnapshot, OffsetRangeExt};
use language::Buffer;

// TODO: Find a better home for `Direction`.
//
@@ -242,51 +242,3 @@ where
self.update(cx, |this, cx| this.suggest(buffer, cursor_position, cx))
}
}

/// Returns edits updated based on user edits since the old snapshot. None is returned if any user
/// edit is not a prefix of a predicted insertion.
pub fn interpolate_edits(
old_snapshot: &BufferSnapshot,
new_snapshot: &BufferSnapshot,
current_edits: &[(Range<Anchor>, String)],
) -> Option<Vec<(Range<Anchor>, String)>> {
let mut edits = Vec::new();

let mut model_edits = current_edits.iter().peekable();
for user_edit in new_snapshot.edits_since::<usize>(&old_snapshot.version) {
while let Some((model_old_range, _)) = model_edits.peek() {
let model_old_range = model_old_range.to_offset(old_snapshot);
if model_old_range.end < user_edit.old.start {
let (model_old_range, model_new_text) = model_edits.next().unwrap();
edits.push((model_old_range.clone(), model_new_text.clone()));
} else {
break;
}
}

if let Some((model_old_range, model_new_text)) = model_edits.peek() {
let model_old_offset_range = model_old_range.to_offset(old_snapshot);
if user_edit.old == model_old_offset_range {
let user_new_text = new_snapshot
.text_for_range(user_edit.new.clone())
.collect::<String>();

if let Some(model_suffix) = model_new_text.strip_prefix(&user_new_text) {
if !model_suffix.is_empty() {
let anchor = old_snapshot.anchor_after(user_edit.old.end);
edits.push((anchor..anchor, model_suffix.to_string()));
}

model_edits.next();
continue;
}
}
}

return None;
}

edits.extend(model_edits.cloned());

if edits.is_empty() { None } else { Some(edits) }
}
@@ -16,7 +16,6 @@ doctest = false
anyhow.workspace = true
client.workspace = true
cloud_llm_client.workspace = true
codestral.workspace = true
copilot.workspace = true
editor.workspace = true
feature_flags.workspace = true

@@ -1,7 +1,6 @@
use anyhow::Result;
use client::{UserStore, zed_urls};
use cloud_llm_client::UsageLimit;
use codestral::CodestralCompletionProvider;
use copilot::{Copilot, Status};
use editor::{Editor, SelectionEffects, actions::ShowEditPrediction, scroll::Autoscroll};
use feature_flags::{FeatureFlagAppExt, PredictEditsRateCompletionsFeatureFlag};
@@ -235,67 +234,6 @@ impl Render for EditPredictionButton {
)
}

EditPredictionProvider::Codestral => {
let enabled = self.editor_enabled.unwrap_or(true);
let has_api_key = CodestralCompletionProvider::has_api_key(cx);
let fs = self.fs.clone();
let this = cx.entity();

div().child(
PopoverMenu::new("codestral")
.menu(move |window, cx| {
if has_api_key {
Some(this.update(cx, |this, cx| {
this.build_codestral_context_menu(window, cx)
}))
} else {
Some(ContextMenu::build(window, cx, |menu, _, _| {
let fs = fs.clone();
menu.entry("Use Zed AI instead", None, move |_, cx| {
set_completion_provider(
fs.clone(),
cx,
EditPredictionProvider::Zed,
)
})
.separator()
.entry(
"Configure Codestral API Key",
None,
move |window, cx| {
window.dispatch_action(
zed_actions::agent::OpenSettings.boxed_clone(),
cx,
);
},
)
}))
}
})
.anchor(Corner::BottomRight)
.trigger_with_tooltip(
IconButton::new("codestral-icon", IconName::AiMistral)
.shape(IconButtonShape::Square)
.when(!has_api_key, |this| {
this.indicator(Indicator::dot().color(Color::Error))
.indicator_border_color(Some(
cx.theme().colors().status_bar_background,
))
})
.when(has_api_key && !enabled, |this| {
this.indicator(Indicator::dot().color(Color::Ignored))
.indicator_border_color(Some(
cx.theme().colors().status_bar_background,
))
}),
move |window, cx| {
Tooltip::for_action("Codestral", &ToggleMenu, window, cx)
},
)
.with_handle(self.popover_menu_handle.clone()),
)
}

EditPredictionProvider::Zed => {
let enabled = self.editor_enabled.unwrap_or(true);

@@ -555,7 +493,6 @@ impl EditPredictionButton {
EditPredictionProvider::Zed
| EditPredictionProvider::Copilot
| EditPredictionProvider::Supermaven
| EditPredictionProvider::Codestral
) {
menu = menu
.separator()
@@ -782,25 +719,6 @@ impl EditPredictionButton {
})
}

fn build_codestral_context_menu(
&self,
window: &mut Window,
cx: &mut Context<Self>,
) -> Entity<ContextMenu> {
let fs = self.fs.clone();
ContextMenu::build(window, cx, |menu, window, cx| {
self.build_language_settings_menu(menu, window, cx)
.separator()
.entry("Use Zed AI instead", None, move |_, cx| {
set_completion_provider(fs.clone(), cx, EditPredictionProvider::Zed)
})
.separator()
.entry("Configure Codestral API Key", None, move |window, cx| {
window.dispatch_action(zed_actions::agent::OpenSettings.boxed_clone(), cx);
})
})
}

fn build_zeta_context_menu(
&self,
window: &mut Window,
@@ -19,7 +19,6 @@ collections.workspace = true
futures.workspace = true
gpui.workspace = true
hashbrown.workspace = true
indoc.workspace = true
itertools.workspace = true
language.workspace = true
log.workspace = true
@@ -46,8 +45,5 @@ project = {workspace= true, features = ["test-support"]}
serde_json.workspace = true
settings = {workspace= true, features = ["test-support"]}
text = { workspace = true, features = ["test-support"] }
tree-sitter-c.workspace = true
tree-sitter-cpp.workspace = true
tree-sitter-go.workspace = true
util = { workspace = true, features = ["test-support"] }
zlog.workspace = true

@@ -1,11 +1,9 @@
use language::{Language, LanguageId};
use language::LanguageId;
use project::ProjectEntryId;
use std::borrow::Cow;
use std::ops::Range;
use std::sync::Arc;
use std::{borrow::Cow, path::Path};
use text::{Bias, BufferId, Rope};
use util::paths::{path_ends_with, strip_path_suffix};
use util::rel_path::RelPath;

use crate::outline::OutlineDeclaration;

@@ -24,14 +22,12 @@ pub enum Declaration {
File {
project_entry_id: ProjectEntryId,
declaration: FileDeclaration,
cached_path: CachedDeclarationPath,
},
Buffer {
project_entry_id: ProjectEntryId,
buffer_id: BufferId,
rope: Rope,
declaration: BufferDeclaration,
cached_path: CachedDeclarationPath,
},
}

@@ -77,13 +73,6 @@ impl Declaration {
}
}

pub fn cached_path(&self) -> &CachedDeclarationPath {
match self {
Declaration::File { cached_path, .. } => cached_path,
Declaration::Buffer { cached_path, .. } => cached_path,
}
}

pub fn item_range(&self) -> Range<usize> {
match self {
Declaration::File { declaration, .. } => declaration.item_range.clone(),
@@ -246,69 +235,3 @@ impl BufferDeclaration {
}
}
}

#[derive(Debug, Clone)]
pub struct CachedDeclarationPath {
pub worktree_abs_path: Arc<Path>,
pub rel_path: Arc<RelPath>,
/// The relative path of the file, possibly stripped according to `import_path_strip_regex`.
pub rel_path_after_regex_stripping: Arc<RelPath>,
}

impl CachedDeclarationPath {
pub fn new(
worktree_abs_path: Arc<Path>,
path: &Arc<RelPath>,
language: Option<&Arc<Language>>,
) -> Self {
let rel_path = path.clone();
let rel_path_after_regex_stripping = if let Some(language) = language
&& let Some(strip_regex) = language.config().import_path_strip_regex.as_ref()
&& let Ok(stripped) = RelPath::unix(&Path::new(
strip_regex.replace_all(rel_path.as_unix_str(), "").as_ref(),
)) {
Arc::from(stripped)
} else {
rel_path.clone()
};
CachedDeclarationPath {
worktree_abs_path,
rel_path,
rel_path_after_regex_stripping,
}
}

#[cfg(test)]
pub fn new_for_test(worktree_abs_path: &str, rel_path: &str) -> Self {
let rel_path: Arc<RelPath> = util::rel_path::rel_path(rel_path).into();
CachedDeclarationPath {
worktree_abs_path: std::path::PathBuf::from(worktree_abs_path).into(),
rel_path_after_regex_stripping: rel_path.clone(),
rel_path,
}
}

pub fn ends_with_posix_path(&self, path: &Path) -> bool {
if path.as_os_str().len() <= self.rel_path_after_regex_stripping.as_unix_str().len() {
path_ends_with(self.rel_path_after_regex_stripping.as_std_path(), path)
} else {
if let Some(remaining) =
strip_path_suffix(path, self.rel_path_after_regex_stripping.as_std_path())
{
path_ends_with(&self.worktree_abs_path, remaining)
} else {
false
}
}
}

pub fn equals_absolute_path(&self, path: &Path) -> bool {
if let Some(remaining) =
strip_path_suffix(path, &self.rel_path_after_regex_stripping.as_std_path())
{
self.worktree_abs_path.as_ref() == remaining
} else {
false
}
}
}
@@ -1,17 +1,15 @@
use cloud_llm_client::predict_edits_v3::DeclarationScoreComponents;
use collections::HashMap;
use itertools::Itertools as _;
use language::BufferSnapshot;
use ordered_float::OrderedFloat;
use project::ProjectEntryId;
use serde::Serialize;
use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc};
use std::{cmp::Reverse, ops::Range};
use strum::EnumIter;
use text::{Point, ToPoint};
use util::RangeExt as _;

use crate::{
CachedDeclarationPath, Declaration, EditPredictionExcerpt, Identifier,
imports::{Import, Imports, Module},
Declaration, EditPredictionExcerpt, Identifier,
reference::{Reference, ReferenceRegion},
syntax_index::SyntaxIndexState,
text_similarity::{Occurrences, jaccard_similarity, weighted_overlap_coefficient},
@@ -19,17 +17,12 @@ use crate::{

const MAX_IDENTIFIER_DECLARATION_COUNT: usize = 16;

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct EditPredictionScoreOptions {
pub omit_excerpt_overlaps: bool,
}

#[derive(Clone, Debug)]
pub struct ScoredDeclaration {
/// identifier used by the local reference
pub identifier: Identifier,
pub declaration: Declaration,
pub components: DeclarationScoreComponents,
pub score_components: DeclarationScoreComponents,
pub scores: DeclarationScores,
}

#[derive(EnumIter, Clone, Copy, PartialEq, Eq, Hash, Debug)]
@@ -38,61 +31,15 @@ pub enum DeclarationStyle {
Declaration,
}

#[derive(Clone, Debug, Serialize, Default)]
pub struct DeclarationScores {
pub signature: f32,
pub declaration: f32,
pub retrieval: f32,
}

impl ScoredDeclaration {
/// Returns the score for this declaration with the specified style.
pub fn score(&self, style: DeclarationStyle) -> f32 {
// TODO: handle truncation

// Score related to how likely this is the correct declaration, range 0 to 1
let retrieval = self.retrieval_score();

// Score related to the distance between the reference and cursor, range 0 to 1
let distance_score = if self.components.is_referenced_nearby {
1.0 / (1.0 + self.components.reference_line_distance as f32 / 10.0).powf(2.0)
} else {
// same score as ~14 lines away, rationale is to not overly penalize references from parent signatures
0.5
};

// For now instead of linear combination, the scores are just multiplied together.
let combined_score = 10.0 * retrieval * distance_score;

match style {
DeclarationStyle::Signature => {
combined_score * self.components.excerpt_vs_signature_weighted_overlap
}
DeclarationStyle::Declaration => {
2.0 * combined_score * self.components.excerpt_vs_item_weighted_overlap
}
DeclarationStyle::Signature => self.scores.signature,
DeclarationStyle::Declaration => self.scores.declaration,
}
}

pub fn retrieval_score(&self) -> f32 {
let mut score = if self.components.is_same_file {
10.0 / self.components.same_file_declaration_count as f32
} else if self.components.path_import_match_count > 0 {
3.0
} else if self.components.wildcard_path_import_match_count > 0 {
1.0
} else if self.components.normalized_import_similarity > 0.0 {
self.components.normalized_import_similarity
} else if self.components.normalized_wildcard_import_similarity > 0.0 {
0.5 * self.components.normalized_wildcard_import_similarity
} else {
1.0 / self.components.declaration_count as f32
};
score *= 1. + self.components.included_by_others as f32 / 2.;
score *= 1. + self.components.includes_others as f32 / 4.;
score
}

pub fn size(&self, style: DeclarationStyle) -> usize {
match &self.declaration {
Declaration::File { declaration, .. } => match style {
@@ -107,259 +54,110 @@ impl ScoredDeclaration {
}

pub fn score_density(&self, style: DeclarationStyle) -> f32 {
self.score(style) / self.size(style) as f32
self.score(style) / (self.size(style)) as f32
}
}
pub fn scored_declarations(
options: &EditPredictionScoreOptions,
index: &SyntaxIndexState,
excerpt: &EditPredictionExcerpt,
excerpt_occurrences: &Occurrences,
adjacent_occurrences: &Occurrences,
imports: &Imports,
identifier_to_references: HashMap<Identifier, Vec<Reference>>,
cursor_offset: usize,
current_buffer: &BufferSnapshot,
) -> Vec<ScoredDeclaration> {
let cursor_point = cursor_offset.to_point(&current_buffer);

let mut wildcard_import_occurrences = Vec::new();
let mut wildcard_import_paths = Vec::new();
for wildcard_import in imports.wildcard_modules.iter() {
match wildcard_import {
Module::Namespace(namespace) => {
wildcard_import_occurrences.push(namespace.occurrences())
}
Module::SourceExact(path) => wildcard_import_paths.push(path),
Module::SourceFuzzy(path) => {
wildcard_import_occurrences.push(Occurrences::from_path(&path))
}
}
}
let mut declarations = identifier_to_references
.into_iter()
.flat_map(|(identifier, references)| {
let declarations =
index.declarations_for_identifier::<MAX_IDENTIFIER_DECLARATION_COUNT>(&identifier);
let declaration_count = declarations.len();

let mut scored_declarations = Vec::new();
let mut project_entry_id_to_outline_ranges: HashMap<ProjectEntryId, Vec<Range<usize>>> =
HashMap::default();
for (identifier, references) in identifier_to_references {
let mut import_occurrences = Vec::new();
let mut import_paths = Vec::new();
let mut found_external_identifier: Option<&Identifier> = None;
declarations
.into_iter()
.filter_map(|(declaration_id, declaration)| match declaration {
Declaration::Buffer {
buffer_id,
declaration: buffer_declaration,
..
} => {
let is_same_file = buffer_id == &current_buffer.remote_id();

if let Some(imports) = imports.identifier_to_imports.get(&identifier) {
// only use alias when it's the only import, could be generalized if some language
// has overlapping aliases
//
// TODO: when an aliased declaration is included in the prompt, should include the
// aliasing in the prompt.
//
// TODO: For SourceFuzzy consider having componentwise comparison that pays
// attention to ordering.
if let [
Import::Alias {
module,
external_identifier,
},
] = imports.as_slice()
{
match module {
Module::Namespace(namespace) => {
import_occurrences.push(namespace.occurrences())
}
Module::SourceExact(path) => import_paths.push(path),
Module::SourceFuzzy(path) => {
import_occurrences.push(Occurrences::from_path(&path))
}
}
found_external_identifier = Some(&external_identifier);
} else {
for import in imports {
match import {
Import::Direct { module } => match module {
Module::Namespace(namespace) => {
import_occurrences.push(namespace.occurrences())
}
Module::SourceExact(path) => import_paths.push(path),
Module::SourceFuzzy(path) => {
import_occurrences.push(Occurrences::from_path(&path))
}
},
Import::Alias { .. } => {}
}
}
}
}
let identifier_to_lookup = found_external_identifier.unwrap_or(&identifier);
// TODO: update this to be able to return more declarations? Especially if there is the
// ability to quickly filter a large list (based on imports)
let identifier_declarations = index
.declarations_for_identifier::<MAX_IDENTIFIER_DECLARATION_COUNT>(&identifier_to_lookup);
let declaration_count = identifier_declarations.len();

if declaration_count == 0 {
continue;
}

// TODO: option to filter out other candidates when same file / import match
let mut checked_declarations = Vec::with_capacity(declaration_count);
for (declaration_id, declaration) in identifier_declarations {
match declaration {
Declaration::Buffer {
buffer_id,
declaration: buffer_declaration,
..
} => {
if buffer_id == &current_buffer.remote_id() {
let already_included_in_prompt =
range_intersection(&buffer_declaration.item_range, &excerpt.range)
.is_some()
if is_same_file {
let overlaps_excerpt =
range_intersection(&buffer_declaration.item_range, &excerpt.range)
.is_some();
if overlaps_excerpt
|| excerpt
.parent_declarations
.iter()
.any(|(excerpt_parent, _)| excerpt_parent == &declaration_id);
if !options.omit_excerpt_overlaps || !already_included_in_prompt {
let declaration_line = buffer_declaration
.item_range
.start
.to_point(current_buffer)
.row;
let declaration_line_distance =
(cursor_point.row as i32 - declaration_line as i32).unsigned_abs();
checked_declarations.push(CheckedDeclaration {
declaration,
same_file_line_distance: Some(declaration_line_distance),
path_import_match_count: 0,
wildcard_path_import_match_count: 0,
});
.any(|(excerpt_parent, _)| excerpt_parent == &declaration_id)
{
None
} else {
let declaration_line = buffer_declaration
.item_range
.start
.to_point(current_buffer)
.row;
Some((
true,
(cursor_point.row as i32 - declaration_line as i32)
.unsigned_abs(),
declaration,
))
}
} else {
Some((false, u32::MAX, declaration))
}
continue;
} else {
}
}
Declaration::File { .. } => {}
}
let declaration_path = declaration.cached_path();
let path_import_match_count = import_paths
.iter()
.filter(|import_path| {
declaration_path_matches_import(&declaration_path, import_path)
Declaration::File { .. } => {
// We can assume that a file declaration is in a different file,
// because the current one must be open
Some((false, u32::MAX, declaration))
}
})
.count();
let wildcard_path_import_match_count = wildcard_import_paths
.iter()
.filter(|import_path| {
declaration_path_matches_import(&declaration_path, import_path)
})
.count();
checked_declarations.push(CheckedDeclaration {
declaration,
same_file_line_distance: None,
path_import_match_count,
wildcard_path_import_match_count,
});
}
.sorted_by_key(|&(_, distance, _)| distance)
.enumerate()
.map(
|(
declaration_line_distance_rank,
(is_same_file, declaration_line_distance, declaration),
)| {
let same_file_declaration_count = index.file_declaration_count(declaration);

let mut max_import_similarity = 0.0;
let mut max_wildcard_import_similarity = 0.0;
score_declaration(
&identifier,
&references,
declaration.clone(),
is_same_file,
declaration_line_distance,
declaration_line_distance_rank,
same_file_declaration_count,
declaration_count,
&excerpt_occurrences,
&adjacent_occurrences,
cursor_point,
current_buffer,
)
},
)
.collect::<Vec<_>>()
})
.flatten()
.collect::<Vec<_>>();
let mut scored_declarations_for_identifier = Vec::with_capacity(checked_declarations.len());
for checked_declaration in checked_declarations {
let same_file_declaration_count =
index.file_declaration_count(checked_declaration.declaration);

let declaration = score_declaration(
&identifier,
&references,
checked_declaration,
same_file_declaration_count,
declaration_count,
&excerpt_occurrences,
&adjacent_occurrences,
&import_occurrences,
&wildcard_import_occurrences,
cursor_point,
current_buffer,
);

if declaration.components.import_similarity > max_import_similarity {
max_import_similarity = declaration.components.import_similarity;
}

if declaration.components.wildcard_import_similarity > max_wildcard_import_similarity {
max_wildcard_import_similarity = declaration.components.wildcard_import_similarity;
}

project_entry_id_to_outline_ranges
.entry(declaration.declaration.project_entry_id())
.or_default()
.push(declaration.declaration.item_range());
scored_declarations_for_identifier.push(declaration);
}

if max_import_similarity > 0.0 || max_wildcard_import_similarity > 0.0 {
for declaration in scored_declarations_for_identifier.iter_mut() {
if max_import_similarity > 0.0 {
declaration.components.max_import_similarity = max_import_similarity;
declaration.components.normalized_import_similarity =
declaration.components.import_similarity / max_import_similarity;
}
if max_wildcard_import_similarity > 0.0 {
declaration.components.normalized_wildcard_import_similarity =
declaration.components.wildcard_import_similarity
/ max_wildcard_import_similarity;
}
}
}

scored_declarations.extend(scored_declarations_for_identifier);
}

// TODO: Inform this via import / retrieval scores of outline items
// TODO: Consider using a sweepline
for scored_declaration in scored_declarations.iter_mut() {
let project_entry_id = scored_declaration.declaration.project_entry_id();
let Some(ranges) = project_entry_id_to_outline_ranges.get(&project_entry_id) else {
continue;
};
for range in ranges {
if range.contains_inclusive(&scored_declaration.declaration.item_range()) {
scored_declaration.components.included_by_others += 1
} else if scored_declaration
.declaration
.item_range()
.contains_inclusive(range)
{
scored_declaration.components.includes_others += 1
}
}
}

scored_declarations.sort_unstable_by_key(|declaration| {
Reverse(OrderedFloat(
declaration.score(DeclarationStyle::Declaration),
))
declarations.sort_unstable_by_key(|declaration| {
let score_density = declaration
.score_density(DeclarationStyle::Declaration)
.max(declaration.score_density(DeclarationStyle::Signature));
Reverse(OrderedFloat(score_density))
});

scored_declarations
}

struct CheckedDeclaration<'a> {
declaration: &'a Declaration,
same_file_line_distance: Option<u32>,
path_import_match_count: usize,
wildcard_path_import_match_count: usize,
}

fn declaration_path_matches_import(
declaration_path: &CachedDeclarationPath,
import_path: &Arc<Path>,
) -> bool {
if import_path.is_absolute() {
declaration_path.equals_absolute_path(import_path)
} else {
declaration_path.ends_with_posix_path(import_path)
}
declarations
}

fn range_intersection<T: Ord + Clone>(a: &Range<T>, b: &Range<T>) -> Option<Range<T>> {
@@ -375,23 +173,17 @@ fn range_intersection<T: Ord + Clone>(a: &Range<T>, b: &Range<T>) -> Option<Rang
fn score_declaration(
identifier: &Identifier,
references: &[Reference],
checked_declaration: CheckedDeclaration,
declaration: Declaration,
is_same_file: bool,
declaration_line_distance: u32,
declaration_line_distance_rank: usize,
same_file_declaration_count: usize,
declaration_count: usize,
excerpt_occurrences: &Occurrences,
adjacent_occurrences: &Occurrences,
import_occurrences: &[Occurrences],
wildcard_import_occurrences: &[Occurrences],
cursor: Point,
current_buffer: &BufferSnapshot,
) -> ScoredDeclaration {
let CheckedDeclaration {
declaration,
same_file_line_distance,
path_import_match_count,
wildcard_path_import_match_count,
} = checked_declaration;

) -> Option<ScoredDeclaration> {
let is_referenced_nearby = references
.iter()
.any(|r| r.region == ReferenceRegion::Nearby);
@@ -408,9 +200,6 @@ fn score_declaration(
.min()
.unwrap();

let is_same_file = same_file_line_distance.is_some();
let declaration_line_distance = same_file_line_distance.unwrap_or(u32::MAX);

let item_source_occurrences = Occurrences::within_string(&declaration.item_text().0);
let item_signature_occurrences = Occurrences::within_string(&declaration.signature_text().0);
let excerpt_vs_item_jaccard = jaccard_similarity(excerpt_occurrences, &item_source_occurrences);
@@ -430,37 +219,6 @@ fn score_declaration(
let adjacent_vs_signature_weighted_overlap =
weighted_overlap_coefficient(adjacent_occurrences, &item_signature_occurrences);

let mut import_similarity = 0f32;
let mut wildcard_import_similarity = 0f32;
if !import_occurrences.is_empty() || !wildcard_import_occurrences.is_empty() {
let cached_path = declaration.cached_path();
let path_occurrences = Occurrences::from_worktree_path(
cached_path
.worktree_abs_path
.file_name()
.map(|f| f.to_string_lossy()),
&cached_path.rel_path,
);
import_similarity = import_occurrences
.iter()
.map(|namespace_occurrences| {
OrderedFloat(jaccard_similarity(namespace_occurrences, &path_occurrences))
})
.max()
.map(|similarity| similarity.into_inner())
.unwrap_or_default();

// TODO: Consider something other than max
wildcard_import_similarity = wildcard_import_occurrences
.iter()
.map(|namespace_occurrences| {
OrderedFloat(jaccard_similarity(namespace_occurrences, &path_occurrences))
})
.max()
.map(|similarity| similarity.into_inner())
.unwrap_or_default();
}

// TODO: Consider adding declaration_file_count
let score_components = DeclarationScoreComponents {
is_same_file,
@@ -468,6 +226,7 @@ fn score_declaration(
is_referenced_in_breadcrumb,
reference_line_distance,
declaration_line_distance,
declaration_line_distance_rank,
reference_count,
same_file_declaration_count,
declaration_count,
@@ -479,61 +238,52 @@ fn score_declaration(
excerpt_vs_signature_weighted_overlap,
adjacent_vs_item_weighted_overlap,
adjacent_vs_signature_weighted_overlap,
path_import_match_count,
wildcard_path_import_match_count,
import_similarity,
max_import_similarity: 0.0,
normalized_import_similarity: 0.0,
wildcard_import_similarity,
normalized_wildcard_import_similarity: 0.0,
included_by_others: 0,
includes_others: 0,
};

ScoredDeclaration {
Some(ScoredDeclaration {
identifier: identifier.clone(),
declaration: declaration.clone(),
components: score_components,
}
declaration: declaration,
scores: DeclarationScores::score(&score_components),
score_components,
})
}
#[cfg(test)]
mod test {
use super::*;
#[derive(Clone, Debug, Serialize)]
pub struct DeclarationScores {
pub signature: f32,
pub declaration: f32,
pub retrieval: f32,
}

#[test]
fn test_declaration_path_matches() {
let declaration_path =
CachedDeclarationPath::new_for_test("/home/user/project", "src/maths.ts");
impl DeclarationScores {
fn score(components: &DeclarationScoreComponents) -> DeclarationScores {
// TODO: handle truncation

assert!(declaration_path_matches_import(
&declaration_path,
&Path::new("maths.ts").into()
));
// Score related to how likely this is the correct declaration, range 0 to 1
let retrieval = if components.is_same_file {
// TODO: use declaration_line_distance_rank
1.0 / components.same_file_declaration_count as f32
} else {
1.0 / components.declaration_count as f32
};

assert!(declaration_path_matches_import(
&declaration_path,
&Path::new("project/src/maths.ts").into()
));
// Score related to the distance between the reference and cursor, range 0 to 1
let distance_score = if components.is_referenced_nearby {
1.0 / (1.0 + components.reference_line_distance as f32 / 10.0).powf(2.0)
} else {
// same score as ~14 lines away, rationale is to not overly penalize references from parent signatures
0.5
};

assert!(declaration_path_matches_import(
&declaration_path,
&Path::new("user/project/src/maths.ts").into()
));
// For now instead of linear combination, the scores are just multiplied together.
let combined_score = 10.0 * retrieval * distance_score;

assert!(declaration_path_matches_import(
&declaration_path,
&Path::new("/home/user/project/src/maths.ts").into()
));

assert!(!declaration_path_matches_import(
&declaration_path,
&Path::new("other.ts").into()
));

assert!(!declaration_path_matches_import(
&declaration_path,
&Path::new("/home/user/project/src/other.ts").into()
));
DeclarationScores {
signature: combined_score * components.excerpt_vs_signature_weighted_overlap,
// declaration score gets boosted both by being multiplied by 2 and by there being more
// weighted overlap.
declaration: 2.0 * combined_score * components.excerpt_vs_item_weighted_overlap,
retrieval,
}
}
}
@@ -1,13 +1,12 @@
mod declaration;
mod declaration_scoring;
mod excerpt;
mod imports;
mod outline;
mod reference;
mod syntax_index;
pub mod text_similarity;

use std::{path::Path, sync::Arc};
use std::sync::Arc;

use collections::HashMap;
use gpui::{App, AppContext as _, Entity, Task};
@@ -17,17 +16,9 @@ use text::{Point, ToOffset as _};
pub use declaration::*;
pub use declaration_scoring::*;
pub use excerpt::*;
pub use imports::*;
pub use reference::*;
pub use syntax_index::*;

#[derive(Clone, Debug, PartialEq)]
pub struct EditPredictionContextOptions {
pub use_imports: bool,
pub excerpt: EditPredictionExcerptOptions,
pub score: EditPredictionScoreOptions,
}

#[derive(Clone, Debug)]
pub struct EditPredictionContext {
pub excerpt: EditPredictionExcerpt,
@@ -40,34 +31,21 @@ impl EditPredictionContext {
pub fn gather_context_in_background(
cursor_point: Point,
buffer: BufferSnapshot,
options: EditPredictionContextOptions,
excerpt_options: EditPredictionExcerptOptions,
syntax_index: Option<Entity<SyntaxIndex>>,
cx: &mut App,
) -> Task<Option<Self>> {
let parent_abs_path = project::File::from_dyn(buffer.file()).and_then(|f| {
let mut path = f.worktree.read(cx).absolutize(&f.path);
if path.pop() { Some(path) } else { None }
});

if let Some(syntax_index) = syntax_index {
let index_state =
syntax_index.read_with(cx, |index, _cx| Arc::downgrade(index.state()));
cx.background_spawn(async move {
let parent_abs_path = parent_abs_path.as_deref();
let index_state = index_state.upgrade()?;
let index_state = index_state.lock().await;
Self::gather_context(
cursor_point,
&buffer,
parent_abs_path,
&options,
Some(&index_state),
)
Self::gather_context(cursor_point, &buffer, &excerpt_options, Some(&index_state))
})
} else {
cx.background_spawn(async move {
let parent_abs_path = parent_abs_path.as_deref();
Self::gather_context(cursor_point, &buffer, parent_abs_path, &options, None)
Self::gather_context(cursor_point, &buffer, &excerpt_options, None)
})
}
}
@@ -75,20 +53,13 @@ impl EditPredictionContext {
pub fn gather_context(
cursor_point: Point,
buffer: &BufferSnapshot,
parent_abs_path: Option<&Path>,
options: &EditPredictionContextOptions,
excerpt_options: &EditPredictionExcerptOptions,
index_state: Option<&SyntaxIndexState>,
) -> Option<Self> {
let imports = if options.use_imports {
Imports::gather(&buffer, parent_abs_path)
} else {
Imports::default()
};
Self::gather_context_with_references_fn(
cursor_point,
buffer,
&imports,
options,
excerpt_options,
index_state,
references_in_excerpt,
)
@@ -97,8 +68,7 @@ impl EditPredictionContext {
pub fn gather_context_with_references_fn(
cursor_point: Point,
buffer: &BufferSnapshot,
imports: &Imports,
options: &EditPredictionContextOptions,
excerpt_options: &EditPredictionExcerptOptions,
index_state: Option<&SyntaxIndexState>,
get_references: impl FnOnce(
&EditPredictionExcerpt,
@@ -109,7 +79,7 @@ impl EditPredictionContext {
let excerpt = EditPredictionExcerpt::select_from_buffer(
cursor_point,
buffer,
&options.excerpt,
excerpt_options,
index_state,
)?;
let excerpt_text = excerpt.text(buffer);
@@ -131,12 +101,10 @@ impl EditPredictionContext {
let references = get_references(&excerpt, &excerpt_text, buffer);

scored_declarations(
&options.score,
&index_state,
&excerpt,
&excerpt_occurrences,
&adjacent_occurrences,
&imports,
references,
cursor_offset_in_file,
buffer,
@@ -192,18 +160,12 @@ mod tests {
EditPredictionContext::gather_context_in_background(
cursor_point,
buffer_snapshot,
EditPredictionContextOptions {
use_imports: true,
excerpt: EditPredictionExcerptOptions {
max_bytes: 60,
min_bytes: 10,
target_before_cursor_over_total_bytes: 0.5,
},
score: EditPredictionScoreOptions {
omit_excerpt_overlaps: true,
},
EditPredictionExcerptOptions {
max_bytes: 60,
min_bytes: 10,
target_before_cursor_over_total_bytes: 0.5,
},
Some(index.clone()),
Some(index),
cx,
)
})
File diff suppressed because it is too large
@@ -5,7 +5,6 @@ use futures::lock::Mutex;
use futures::{FutureExt as _, StreamExt, future};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
use itertools::Itertools;

use language::{Buffer, BufferEvent};
use postage::stream::Stream as _;
use project::buffer_store::{BufferStore, BufferStoreEvent};
@@ -18,7 +17,6 @@ use std::sync::Arc;
use text::BufferId;
use util::{RangeExt as _, debug_panic, some_or_debug_panic};

use crate::CachedDeclarationPath;
use crate::declaration::{
BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
};
@@ -30,8 +28,6 @@ use crate::outline::declarations_in_buffer;
// `buffer_declarations_containing_range` assumes that the index is always immediately up to date.
//
// * Add a per language configuration for skipping indexing.
//
// * Handle tsx / ts / js referencing each-other

// Potential future improvements:
//
@@ -65,7 +61,6 @@ pub struct SyntaxIndex {
state: Arc<Mutex<SyntaxIndexState>>,
project: WeakEntity<Project>,
initial_file_indexing_done_rx: postage::watch::Receiver<bool>,
_file_indexing_task: Option<Task<()>>,
}

pub struct SyntaxIndexState {
@@ -75,6 +70,7 @@ pub struct SyntaxIndexState {
buffers: HashMap<BufferId, BufferState>,
dirty_files: HashMap<ProjectEntryId, ProjectPath>,
dirty_files_tx: mpsc::Sender<()>,
_file_indexing_task: Option<Task<()>>,
}

#[derive(Debug, Default)]
@@ -106,12 +102,12 @@ impl SyntaxIndex {
buffers: HashMap::default(),
dirty_files: HashMap::default(),
dirty_files_tx,
_file_indexing_task: None,
};
let mut this = Self {
let this = Self {
project: project.downgrade(),
state: Arc::new(Mutex::new(initial_state)),
initial_file_indexing_done_rx,
_file_indexing_task: None,
};

let worktree_store = project.read(cx).worktree_store();
@@ -120,77 +116,75 @@ impl SyntaxIndex {
.worktrees()
.map(|w| w.read(cx).snapshot())
.collect::<Vec<_>>();
this._file_indexing_task = Some(cx.spawn(async move |this, cx| {
let snapshots_file_count = initial_worktree_snapshots
.iter()
.map(|worktree| worktree.file_count())
.sum::<usize>();
if snapshots_file_count > 0 {
let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
let chunk_count = snapshots_file_count.div_ceil(chunk_size);
let file_chunks = initial_worktree_snapshots
.iter()
.flat_map(|worktree| {
let worktree_id = worktree.id();
worktree.files(false, 0).map(move |entry| {
(
entry.id,
ProjectPath {
worktree_id,
path: entry.path.clone(),
},
)
if !initial_worktree_snapshots.is_empty() {
this.state.try_lock().unwrap()._file_indexing_task =
Some(cx.spawn(async move |this, cx| {
let snapshots_file_count = initial_worktree_snapshots
.iter()
.map(|worktree| worktree.file_count())
.sum::<usize>();
let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
let chunk_count = snapshots_file_count.div_ceil(chunk_size);
let file_chunks = initial_worktree_snapshots
.iter()
.flat_map(|worktree| {
let worktree_id = worktree.id();
worktree.files(false, 0).map(move |entry| {
(
entry.id,
ProjectPath {
worktree_id,
path: entry.path.clone(),
},
)
})
})
})
.chunks(chunk_size);
.chunks(chunk_size);

let mut tasks = Vec::with_capacity(chunk_count);
for chunk in file_chunks.into_iter() {
tasks.push(Self::update_dirty_files(
&this,
chunk.into_iter().collect(),
cx.clone(),
));
}
futures::future::join_all(tasks).await;
log::info!("Finished initial file indexing");
}
let mut tasks = Vec::with_capacity(chunk_count);
for chunk in file_chunks.into_iter() {
tasks.push(Self::update_dirty_files(
&this,
chunk.into_iter().collect(),
cx.clone(),
));
}
futures::future::join_all(tasks).await;

*initial_file_indexing_done_tx.borrow_mut() = true;
log::info!("Finished initial file indexing");
*initial_file_indexing_done_tx.borrow_mut() = true;
let Ok(state) = this.read_with(cx, |this, _cx| Arc::downgrade(&this.state)) else {
|
||||
return;
|
||||
};
|
||||
while dirty_files_rx.next().await.is_some() {
|
||||
let Some(state) = state.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let mut state = state.lock().await;
|
||||
let was_underused = state.dirty_files.capacity() > 255
|
||||
&& state.dirty_files.len() * 8 < state.dirty_files.capacity();
|
||||
let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
|
||||
if was_underused {
|
||||
state.dirty_files.shrink_to_fit();
|
||||
}
|
||||
drop(state);
|
||||
if dirty_files.is_empty() {
|
||||
continue;
|
||||
}
|
||||
let Ok(state) = this.read_with(cx, |this, _cx| this.state.clone()) else {
|
||||
return;
|
||||
};
|
||||
while dirty_files_rx.next().await.is_some() {
|
||||
let mut state = state.lock().await;
|
||||
let was_underused = state.dirty_files.capacity() > 255
|
||||
&& state.dirty_files.len() * 8 < state.dirty_files.capacity();
|
||||
let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
|
||||
if was_underused {
|
||||
state.dirty_files.shrink_to_fit();
|
||||
}
|
||||
drop(state);
|
||||
if dirty_files.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
|
||||
let chunk_count = dirty_files.len().div_ceil(chunk_size);
|
||||
let mut tasks = Vec::with_capacity(chunk_count);
|
||||
let chunks = dirty_files.into_iter().chunks(chunk_size);
|
||||
for chunk in chunks.into_iter() {
|
||||
tasks.push(Self::update_dirty_files(
|
||||
&this,
|
||||
chunk.into_iter().collect(),
|
||||
cx.clone(),
|
||||
));
|
||||
}
|
||||
futures::future::join_all(tasks).await;
|
||||
}
|
||||
}));
|
||||
let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
|
||||
let chunk_count = dirty_files.len().div_ceil(chunk_size);
|
||||
let mut tasks = Vec::with_capacity(chunk_count);
|
||||
let chunks = dirty_files.into_iter().chunks(chunk_size);
|
||||
for chunk in chunks.into_iter() {
|
||||
tasks.push(Self::update_dirty_files(
|
||||
&this,
|
||||
chunk.into_iter().collect(),
|
||||
cx.clone(),
|
||||
));
|
||||
}
|
||||
futures::future::join_all(tasks).await;
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
|
||||
.detach();
|
||||
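The indexing task above splits the initial file scan into at most `file_indexing_parallelism` batches via ceiling division. A minimal, self-contained sketch of that arithmetic (illustrative only, not taken from the diff; the zero-file guard mirrors the `if snapshots_file_count > 0` check above, since computing a chunk count from a zero chunk size would divide by zero):

fn chunk_plan(file_count: usize, parallelism: usize) -> Option<(usize, usize)> {
    if file_count == 0 {
        // Mirrors the `snapshots_file_count > 0` guard: with zero files,
        // `file_count.div_ceil(chunk_size)` would panic on division by zero.
        return None;
    }
    let chunk_size = file_count.div_ceil(parallelism);
    let chunk_count = file_count.div_ceil(chunk_size);
    Some((chunk_size, chunk_count))
}

fn main() {
    // 10 files across 4 workers -> 4 chunks of at most 3 files each.
    assert_eq!(chunk_plan(10, 4), Some((3, 4)));
    assert_eq!(chunk_plan(0, 4), None);
    let files: Vec<u32> = (0..10).collect();
    for chunk in files.chunks(3) {
        // Each chunk would become one `update_dirty_files` task.
        println!("{chunk:?}");
    }
}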
@@ -370,9 +364,7 @@ impl SyntaxIndex {
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited |
            // paths are cached and so should be updated
            BufferEvent::FileHandleChanged => self.update_buffer(buffer, cx),
            BufferEvent::Edited => self.update_buffer(buffer, cx),
            _ => {}
        }
    }
@@ -383,16 +375,8 @@ impl SyntaxIndex {
            return;
        }

        let Some((project_entry_id, cached_path)) = project::File::from_dyn(buffer.file())
            .and_then(|f| {
                let project_entry_id = f.project_entry_id()?;
                let cached_path = CachedDeclarationPath::new(
                    f.worktree.read(cx).abs_path(),
                    &f.path,
                    buffer.language(),
                );
                Some((project_entry_id, cached_path))
            })
        let Some(project_entry_id) =
            project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
        else {
            return;
        };
@@ -456,7 +440,6 @@ impl SyntaxIndex {
|
||||
buffer_id,
|
||||
declaration,
|
||||
project_entry_id,
|
||||
cached_path: cached_path.clone(),
|
||||
});
|
||||
new_ids.push(declaration_id);
|
||||
|
||||
@@ -524,14 +507,13 @@ impl SyntaxIndex {
|
||||
|
||||
let snapshot_task = worktree.update(cx, |worktree, cx| {
|
||||
let load_task = worktree.load_file(&project_path.path, cx);
|
||||
let worktree_abs_path = worktree.abs_path();
|
||||
cx.spawn(async move |_this, cx| {
|
||||
let loaded_file = load_task.await?;
|
||||
let language = language.await?;
|
||||
|
||||
let buffer = cx.new(|cx| {
|
||||
let mut buffer = Buffer::local(loaded_file.text, cx);
|
||||
buffer.set_language(Some(language.clone()), cx);
|
||||
buffer.set_language(Some(language), cx);
|
||||
buffer
|
||||
})?;
|
||||
|
||||
@@ -540,22 +522,14 @@ impl SyntaxIndex {
|
||||
parse_status.changed().await?;
|
||||
}
|
||||
|
||||
let cached_path = CachedDeclarationPath::new(
|
||||
worktree_abs_path,
|
||||
&project_path.path,
|
||||
Some(&language),
|
||||
);
|
||||
|
||||
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
|
||||
anyhow::Ok((snapshot, cached_path))
|
||||
buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
|
||||
})
|
||||
});
|
||||
|
||||
let state = Arc::downgrade(&self.state);
|
||||
cx.background_spawn(async move {
|
||||
// TODO: How to handle errors?
|
||||
let Ok((snapshot, cached_path)) = snapshot_task.await else {
|
||||
let Ok(snapshot) = snapshot_task.await else {
|
||||
return;
|
||||
};
|
||||
let rope = snapshot.as_rope();
|
||||
@@ -593,7 +567,6 @@ impl SyntaxIndex {
|
||||
let declaration_id = state.declarations.insert(Declaration::File {
|
||||
project_entry_id: entry_id,
|
||||
declaration,
|
||||
cached_path: cached_path.clone(),
|
||||
});
|
||||
new_ids.push(declaration_id);
|
||||
|
||||
@@ -948,7 +921,6 @@ mod tests {
|
||||
if let Declaration::File {
|
||||
declaration,
|
||||
project_entry_id: file,
|
||||
..
|
||||
} = declaration
|
||||
{
|
||||
assert_eq!(
|
||||
|
||||
@@ -1,12 +1,9 @@
use hashbrown::HashTable;
use regex::Regex;
use std::{
    borrow::Cow,
    hash::{Hash, Hasher as _},
    path::Path,
    sync::LazyLock,
};
use util::rel_path::RelPath;

use crate::reference::Reference;

@@ -48,34 +45,19 @@ impl Occurrences {
        )
    }

    pub fn from_identifiers(identifiers: impl IntoIterator<Item = impl AsRef<str>>) -> Self {
    pub fn from_identifiers<'a>(identifiers: impl IntoIterator<Item = &'a str>) -> Self {
        let mut this = Self::default();
        // TODO: Score matches that match case higher?
        //
        // TODO: Also include unsplit identifier?
        for identifier in identifiers {
            for identifier_part in split_identifier(identifier.as_ref()) {
            for identifier_part in split_identifier(identifier) {
                this.add_hash(fx_hash(&identifier_part.to_lowercase()));
            }
        }
        this
    }

    pub fn from_worktree_path(worktree_name: Option<Cow<'_, str>>, rel_path: &RelPath) -> Self {
        if let Some(worktree_name) = worktree_name {
            Self::from_identifiers(
                std::iter::once(worktree_name)
                    .chain(iter_path_without_extension(rel_path.as_std_path())),
            )
        } else {
            Self::from_path(rel_path.as_std_path())
        }
    }

    pub fn from_path(path: &Path) -> Self {
        Self::from_identifiers(iter_path_without_extension(path))
    }

    fn add_hash(&mut self, hash: u64) {
        self.table
            .entry(
@@ -100,15 +82,6 @@ impl Occurrences {
    }
}

fn iter_path_without_extension(path: &Path) -> impl Iterator<Item = Cow<'_, str>> {
    let last_component: Option<Cow<'_, str>> = path.file_stem().map(|stem| stem.to_string_lossy());
    let mut path_components = path.components();
    path_components.next_back();
    path_components
        .map(|component| component.as_os_str().to_string_lossy())
        .chain(last_component)
}

pub fn fx_hash<T: Hash + ?Sized>(data: &T) -> u64 {
    let mut hasher = collections::FxHasher::default();
    data.hash(&mut hasher);
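The signature change above (from `&'a str` items to `impl AsRef<str>`) is what lets the new `from_worktree_path` feed in the `Cow<'_, str>` components produced by `to_string_lossy()`. A rough standalone illustration of why that generic bound accepts both kinds of input (hypothetical helper, not from the diff):

use std::borrow::Cow;

fn collect_parts(identifiers: impl IntoIterator<Item = impl AsRef<str>>) -> Vec<String> {
    identifiers
        .into_iter()
        .map(|identifier| identifier.as_ref().to_lowercase())
        .collect()
}

fn main() {
    // Plain &str items still work...
    assert_eq!(collect_parts(["Foo", "Bar"]), ["foo", "bar"]);
    // ...and so do Cow<str> values, as produced by `OsStr::to_string_lossy()`.
    let parts: Vec<Cow<'_, str>> = vec![Cow::Borrowed("Baz"), Cow::Owned("Qux".into())];
    assert_eq!(collect_parts(parts), ["baz", "qux"]);
}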
@@ -296,19 +269,4 @@ mod test {
        // the smaller set, 10.
        assert_eq!(weighted_overlap_coefficient(&set_a, &set_b), 7.0 / 10.0);
    }

    #[test]
    fn test_iter_path_without_extension() {
        let mut iter = iter_path_without_extension(Path::new(""));
        assert_eq!(iter.next(), None);

        let iter = iter_path_without_extension(Path::new("foo"));
        assert_eq!(iter.collect::<Vec<_>>(), ["foo"]);

        let iter = iter_path_without_extension(Path::new("foo/bar.txt"));
        assert_eq!(iter.collect::<Vec<_>>(), ["foo", "bar"]);

        let iter = iter_path_without_extension(Path::new("foo/bar/baz.txt"));
        assert_eq!(iter.collect::<Vec<_>>(), ["foo", "bar", "baz"]);
    }
}

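For context on the `7.0 / 10.0` expectation above, one plausible reading of a weighted overlap coefficient (an assumed definition for illustration, not lifted from this crate) is the weighted size of the intersection divided by the total weight of the smaller multiset, which matches the "smaller set, 10" comment:

use std::collections::HashMap;

fn weighted_overlap_coefficient(a: &HashMap<&str, usize>, b: &HashMap<&str, usize>) -> f64 {
    let weight = |m: &HashMap<&str, usize>| m.values().sum::<usize>() as f64;
    let intersection: usize = a
        .iter()
        .filter_map(|(key, &count)| b.get(key).map(|&other| count.min(other)))
        .sum();
    intersection as f64 / weight(a).min(weight(b))
}

fn main() {
    let set_a = HashMap::from([("foo", 4), ("bar", 3), ("qux", 3)]); // total weight 10
    let set_b = HashMap::from([("foo", 5), ("bar", 3), ("baz", 4)]); // total weight 12
    // Shared weight min(4,5) + min(3,3) = 7, divided by the smaller total, 10.
    assert_eq!(weighted_overlap_coefficient(&set_a, &set_b), 7.0 / 10.0);
}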
@@ -456,33 +456,6 @@ actions!(
        Fold,
        /// Folds all foldable regions in the editor.
        FoldAll,
        /// Folds all code blocks at indentation level 1.
        #[action(name = "FoldAtLevel_1")]
        FoldAtLevel1,
        /// Folds all code blocks at indentation level 2.
        #[action(name = "FoldAtLevel_2")]
        FoldAtLevel2,
        /// Folds all code blocks at indentation level 3.
        #[action(name = "FoldAtLevel_3")]
        FoldAtLevel3,
        /// Folds all code blocks at indentation level 4.
        #[action(name = "FoldAtLevel_4")]
        FoldAtLevel4,
        /// Folds all code blocks at indentation level 5.
        #[action(name = "FoldAtLevel_5")]
        FoldAtLevel5,
        /// Folds all code blocks at indentation level 6.
        #[action(name = "FoldAtLevel_6")]
        FoldAtLevel6,
        /// Folds all code blocks at indentation level 7.
        #[action(name = "FoldAtLevel_7")]
        FoldAtLevel7,
        /// Folds all code blocks at indentation level 8.
        #[action(name = "FoldAtLevel_8")]
        FoldAtLevel8,
        /// Folds all code blocks at indentation level 9.
        #[action(name = "FoldAtLevel_9")]
        FoldAtLevel9,
        /// Folds all function bodies in the editor.
        FoldFunctionBodies,
        /// Folds the current code block and all its children.

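This hunk drops the nine per-level actions in favor of the existing parameterized `FoldAtLevel` action; the wrapper methods that forwarded to `actions::FoldAtLevel(n)` are removed in a later hunk of this diff. A rough sketch of the idea, using a plain struct rather than gpui's `actions!` macro (names here are stand-ins, not the editor's real types):

// Hypothetical stand-ins for the gpui action types, for illustration only.
struct FoldAtLevel(u32);

struct Editor;

impl Editor {
    fn fold_at_level(&mut self, action: &FoldAtLevel) {
        // One handler covers every level; a keybinding can carry the level as
        // action data instead of needing nine distinct action names.
        println!("folding all blocks at indentation level {}", action.0);
    }
}

fn main() {
    let mut editor = Editor;
    for level in 1..=9 {
        editor.fold_at_level(&FoldAtLevel(level));
    }
}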
@@ -1518,7 +1518,6 @@ impl CodeActionsMenu {
|
||||
this.child(
|
||||
h_flex()
|
||||
.overflow_hidden()
|
||||
.when(is_quick_action_bar, |this| this.text_ui(cx))
|
||||
.child(task.resolved_label.replace("\n", ""))
|
||||
.when(selected, |this| {
|
||||
this.text_color(colors.text_accent)
|
||||
@@ -1529,7 +1528,6 @@ impl CodeActionsMenu {
|
||||
this.child(
|
||||
h_flex()
|
||||
.overflow_hidden()
|
||||
.when(is_quick_action_bar, |this| this.text_ui(cx))
|
||||
.child("debug: ")
|
||||
.child(scenario.label.clone())
|
||||
.when(selected, |this| {
|
||||
|
||||
@@ -689,7 +689,6 @@ impl BlockMap {
|
||||
|
||||
// For each of these blocks, insert a new isomorphic transform preceding the block,
|
||||
// and then insert the block itself.
|
||||
let mut just_processed_folded_buffer = false;
|
||||
for (block_placement, block) in blocks_in_edit.drain(..) {
|
||||
let mut summary = TransformSummary {
|
||||
input_rows: 0,
|
||||
@@ -702,12 +701,8 @@ impl BlockMap {
|
||||
match block_placement {
|
||||
BlockPlacement::Above(position) => {
|
||||
rows_before_block = position.0 - new_transforms.summary().input_rows;
|
||||
just_processed_folded_buffer = false;
|
||||
}
|
||||
BlockPlacement::Near(position) | BlockPlacement::Below(position) => {
|
||||
if just_processed_folded_buffer {
|
||||
continue;
|
||||
}
|
||||
if position.0 + 1 < new_transforms.summary().input_rows {
|
||||
continue;
|
||||
}
|
||||
@@ -716,7 +711,6 @@ impl BlockMap {
|
||||
BlockPlacement::Replace(range) => {
|
||||
rows_before_block = range.start().0 - new_transforms.summary().input_rows;
|
||||
summary.input_rows = range.end().0 - range.start().0 + 1;
|
||||
just_processed_folded_buffer = matches!(block, Block::FoldedBuffer { .. });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3572,96 +3566,6 @@ mod tests {
|
||||
assert_eq!(blocks_snapshot.text(), "abc\n\ndef\nghi\njkl\nmno");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_folded_buffer_with_near_blocks(cx: &mut gpui::TestAppContext) {
|
||||
cx.update(init_test);
|
||||
|
||||
let text = "line 1\nline 2\nline 3";
|
||||
let buffer = cx.update(|cx| {
|
||||
MultiBuffer::build_multi([(text, vec![Point::new(0, 0)..Point::new(2, 6)])], cx)
|
||||
});
|
||||
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
|
||||
let buffer_ids = buffer_snapshot
|
||||
.excerpts()
|
||||
.map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
|
||||
.dedup()
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(buffer_ids.len(), 1);
|
||||
let buffer_id = buffer_ids[0];
|
||||
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, wrap_snapshot) =
|
||||
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
let mut block_map = BlockMap::new(wrap_snapshot.clone(), 1, 1);
|
||||
|
||||
let mut writer = block_map.write(wrap_snapshot.clone(), Patch::default());
|
||||
writer.insert(vec![BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Near(buffer_snapshot.anchor_after(Point::new(0, 0))),
|
||||
height: Some(1),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}]);
|
||||
|
||||
let blocks_snapshot = block_map.read(wrap_snapshot.clone(), Patch::default());
|
||||
assert_eq!(blocks_snapshot.text(), "\nline 1\n\nline 2\nline 3");
|
||||
|
||||
let mut writer = block_map.write(wrap_snapshot.clone(), Patch::default());
|
||||
buffer.read_with(cx, |buffer, cx| {
|
||||
writer.fold_buffers([buffer_id], buffer, cx);
|
||||
});
|
||||
|
||||
let blocks_snapshot = block_map.read(wrap_snapshot, Patch::default());
|
||||
assert_eq!(blocks_snapshot.text(), "");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_folded_buffer_with_near_blocks_on_last_line(cx: &mut gpui::TestAppContext) {
|
||||
cx.update(init_test);
|
||||
|
||||
let text = "line 1\nline 2\nline 3\nline 4";
|
||||
let buffer = cx.update(|cx| {
|
||||
MultiBuffer::build_multi([(text, vec![Point::new(0, 0)..Point::new(3, 6)])], cx)
|
||||
});
|
||||
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
|
||||
let buffer_ids = buffer_snapshot
|
||||
.excerpts()
|
||||
.map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
|
||||
.dedup()
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(buffer_ids.len(), 1);
|
||||
let buffer_id = buffer_ids[0];
|
||||
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, wrap_snapshot) =
|
||||
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
let mut block_map = BlockMap::new(wrap_snapshot.clone(), 1, 1);
|
||||
|
||||
let mut writer = block_map.write(wrap_snapshot.clone(), Patch::default());
|
||||
writer.insert(vec![BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Near(buffer_snapshot.anchor_after(Point::new(3, 6))),
|
||||
height: Some(1),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}]);
|
||||
|
||||
let blocks_snapshot = block_map.read(wrap_snapshot.clone(), Patch::default());
|
||||
assert_eq!(blocks_snapshot.text(), "\nline 1\nline 2\nline 3\nline 4\n");
|
||||
|
||||
let mut writer = block_map.write(wrap_snapshot.clone(), Patch::default());
|
||||
buffer.read_with(cx, |buffer, cx| {
|
||||
writer.fold_buffers([buffer_id], buffer, cx);
|
||||
});
|
||||
|
||||
let blocks_snapshot = block_map.read(wrap_snapshot, Patch::default());
|
||||
assert_eq!(blocks_snapshot.text(), "");
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut gpui::App) {
|
||||
let settings = SettingsStore::test(cx);
|
||||
cx.set_global(settings);
|
||||
|
||||
@@ -226,7 +226,6 @@ pub const SELECTION_HIGHLIGHT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis
|
||||
pub(crate) const CODE_ACTION_TIMEOUT: Duration = Duration::from_secs(5);
|
||||
pub(crate) const FORMAT_TIMEOUT: Duration = Duration::from_secs(5);
|
||||
pub(crate) const SCROLL_CENTER_TOP_BOTTOM_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1);
|
||||
pub const FETCH_COLORS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(150);
|
||||
|
||||
pub(crate) const EDIT_PREDICTION_KEY_CONTEXT: &str = "edit_prediction";
|
||||
pub(crate) const EDIT_PREDICTION_CONFLICT_KEY_CONTEXT: &str = "edit_prediction_conflict";
|
||||
@@ -1190,7 +1189,6 @@ pub struct Editor {
|
||||
inline_value_cache: InlineValueCache,
|
||||
selection_drag_state: SelectionDragState,
|
||||
colors: Option<LspColorData>,
|
||||
refresh_colors_task: Task<()>,
|
||||
folding_newlines: Task<()>,
|
||||
pub lookup_key: Option<Box<dyn Any + Send + Sync>>,
|
||||
}
|
||||
@@ -2246,7 +2244,6 @@ impl Editor {
|
||||
tasks_update_task: None,
|
||||
pull_diagnostics_task: Task::ready(()),
|
||||
colors: None,
|
||||
refresh_colors_task: Task::ready(()),
|
||||
next_color_inlay_id: 0,
|
||||
linked_edit_ranges: Default::default(),
|
||||
in_project_search: false,
|
||||
@@ -3175,7 +3172,7 @@ impl Editor {
|
||||
self.refresh_code_actions(window, cx);
|
||||
self.refresh_document_highlights(cx);
|
||||
self.refresh_selected_text_highlights(false, window, cx);
|
||||
refresh_matching_bracket_highlights(self, cx);
|
||||
refresh_matching_bracket_highlights(self, window, cx);
|
||||
self.update_visible_edit_prediction(window, cx);
|
||||
self.edit_prediction_requires_modifier_in_indent_conflict = true;
|
||||
linked_editing_ranges::refresh_linked_ranges(self, window, cx);
|
||||
@@ -3514,46 +3511,26 @@ impl Editor {
|
||||
) {
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let tail = self.selections.newest::<usize>(cx).tail();
|
||||
let click_count = click_count.max(match self.selections.select_mode() {
|
||||
SelectMode::Character => 1,
|
||||
SelectMode::Word(_) => 2,
|
||||
SelectMode::Line(_) => 3,
|
||||
SelectMode::All => 4,
|
||||
});
|
||||
self.begin_selection(position, false, click_count, window, cx);
|
||||
|
||||
let position = position.to_offset(&display_map, Bias::Left);
|
||||
let tail_anchor = display_map.buffer_snapshot().anchor_before(tail);
|
||||
|
||||
let current_selection = match self.selections.select_mode() {
|
||||
SelectMode::Character | SelectMode::All => tail_anchor..tail_anchor,
|
||||
SelectMode::Word(range) | SelectMode::Line(range) => range.clone(),
|
||||
};
|
||||
|
||||
let mut pending_selection = self
|
||||
.selections
|
||||
.pending_anchor()
|
||||
.cloned()
|
||||
.expect("extend_selection not called with pending selection");
|
||||
|
||||
if pending_selection
|
||||
.start
|
||||
.cmp(¤t_selection.start, display_map.buffer_snapshot())
|
||||
== Ordering::Greater
|
||||
{
|
||||
pending_selection.start = current_selection.start;
|
||||
}
|
||||
if pending_selection
|
||||
.end
|
||||
.cmp(¤t_selection.end, display_map.buffer_snapshot())
|
||||
== Ordering::Less
|
||||
{
|
||||
pending_selection.end = current_selection.end;
|
||||
if position >= tail {
|
||||
pending_selection.start = tail_anchor;
|
||||
} else {
|
||||
pending_selection.end = tail_anchor;
|
||||
pending_selection.reversed = true;
|
||||
}
|
||||
|
||||
let mut pending_mode = self.selections.pending_mode().unwrap();
|
||||
match &mut pending_mode {
|
||||
SelectMode::Word(range) | SelectMode::Line(range) => *range = current_selection,
|
||||
SelectMode::Word(range) | SelectMode::Line(range) => *range = tail_anchor..tail_anchor,
|
||||
_ => {}
|
||||
}
|
||||
|
||||
@@ -3564,8 +3541,7 @@ impl Editor {
|
||||
};
|
||||
|
||||
self.change_selections(effects, window, cx, |s| {
|
||||
s.set_pending(pending_selection.clone(), pending_mode);
|
||||
s.set_is_extending(true);
|
||||
s.set_pending(pending_selection.clone(), pending_mode)
|
||||
});
|
||||
}
|
||||
|
||||
@@ -3834,16 +3810,11 @@ impl Editor {
|
||||
|
||||
fn end_selection(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.columnar_selection_state.take();
|
||||
if let Some(pending_mode) = self.selections.pending_mode() {
|
||||
if self.selections.pending_anchor().is_some() {
|
||||
let selections = self.selections.all::<usize>(cx);
|
||||
self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
|
||||
s.select(selections);
|
||||
s.clear_pending();
|
||||
if s.is_extending() {
|
||||
s.set_is_extending(false);
|
||||
} else {
|
||||
s.set_select_mode(pending_mode);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -5243,7 +5214,15 @@ impl Editor {
|
||||
if enabled {
|
||||
(InvalidationStrategy::RefreshRequested, None)
|
||||
} else {
|
||||
self.clear_inlay_hints(cx);
|
||||
self.splice_inlays(
|
||||
&self
|
||||
.visible_inlay_hints(cx)
|
||||
.iter()
|
||||
.map(|inlay| inlay.id)
|
||||
.collect::<Vec<InlayId>>(),
|
||||
Vec::new(),
|
||||
cx,
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -5255,7 +5234,15 @@ impl Editor {
|
||||
if enabled {
|
||||
(InvalidationStrategy::RefreshRequested, None)
|
||||
} else {
|
||||
self.clear_inlay_hints(cx);
|
||||
self.splice_inlays(
|
||||
&self
|
||||
.visible_inlay_hints(cx)
|
||||
.iter()
|
||||
.map(|inlay| inlay.id)
|
||||
.collect::<Vec<InlayId>>(),
|
||||
Vec::new(),
|
||||
cx,
|
||||
);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
@@ -5266,7 +5253,7 @@ impl Editor {
|
||||
match self.inlay_hint_cache.update_settings(
|
||||
&self.buffer,
|
||||
new_settings,
|
||||
self.visible_inlay_hints(cx).cloned().collect::<Vec<_>>(),
|
||||
self.visible_inlay_hints(cx),
|
||||
cx,
|
||||
) {
|
||||
ControlFlow::Break(Some(InlaySplice {
|
||||
@@ -5316,25 +5303,13 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn clear_inlay_hints(&self, cx: &mut Context<Editor>) {
|
||||
self.splice_inlays(
|
||||
&self
|
||||
.visible_inlay_hints(cx)
|
||||
.map(|inlay| inlay.id)
|
||||
.collect::<Vec<_>>(),
|
||||
Vec::new(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
fn visible_inlay_hints<'a>(
|
||||
&'a self,
|
||||
cx: &'a Context<Editor>,
|
||||
) -> impl Iterator<Item = &'a Inlay> {
|
||||
fn visible_inlay_hints(&self, cx: &Context<Editor>) -> Vec<Inlay> {
|
||||
self.display_map
|
||||
.read(cx)
|
||||
.current_inlays()
|
||||
.filter(move |inlay| matches!(inlay.id, InlayId::Hint(_)))
|
||||
.cloned()
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn visible_excerpts(
|
||||
@@ -5368,7 +5343,7 @@ impl Editor {
|
||||
let buffer_worktree = project.worktree_for_id(buffer_file.worktree_id(cx), cx)?;
|
||||
let worktree_entry = buffer_worktree
|
||||
.read(cx)
|
||||
.entry_for_id(buffer_file.project_entry_id()?)?;
|
||||
.entry_for_id(buffer_file.project_entry_id(cx)?)?;
|
||||
if worktree_entry.is_ignored {
|
||||
return None;
|
||||
}
|
||||
@@ -6632,32 +6607,26 @@ impl Editor {
|
||||
&self.context_menu
|
||||
}
|
||||
|
||||
fn refresh_code_actions(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
fn refresh_code_actions(&mut self, window: &mut Window, cx: &mut Context<Self>) -> Option<()> {
|
||||
let newest_selection = self.selections.newest_anchor().clone();
|
||||
let newest_selection_adjusted = self.selections.newest_adjusted(cx);
|
||||
let buffer = self.buffer.read(cx);
|
||||
if newest_selection.head().diff_base_anchor.is_some() {
|
||||
return None;
|
||||
}
|
||||
let (start_buffer, start) =
|
||||
buffer.text_anchor_for_position(newest_selection_adjusted.start, cx)?;
|
||||
let (end_buffer, end) =
|
||||
buffer.text_anchor_for_position(newest_selection_adjusted.end, cx)?;
|
||||
if start_buffer != end_buffer {
|
||||
return None;
|
||||
}
|
||||
|
||||
self.code_actions_task = Some(cx.spawn_in(window, async move |this, cx| {
|
||||
cx.background_executor()
|
||||
.timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT)
|
||||
.await;
|
||||
|
||||
let (start_buffer, start, _, end, newest_selection) = this
|
||||
.update(cx, |this, cx| {
|
||||
let newest_selection = this.selections.newest_anchor().clone();
|
||||
if newest_selection.head().diff_base_anchor.is_some() {
|
||||
return None;
|
||||
}
|
||||
let newest_selection_adjusted = this.selections.newest_adjusted(cx);
|
||||
let buffer = this.buffer.read(cx);
|
||||
|
||||
let (start_buffer, start) =
|
||||
buffer.text_anchor_for_position(newest_selection_adjusted.start, cx)?;
|
||||
let (end_buffer, end) =
|
||||
buffer.text_anchor_for_position(newest_selection_adjusted.end, cx)?;
|
||||
|
||||
Some((start_buffer, start, end_buffer, end, newest_selection))
|
||||
})?
|
||||
.filter(|(start_buffer, _, end_buffer, _, _)| start_buffer == end_buffer)
|
||||
.context(
|
||||
"Expected selection to lie in a single buffer when refreshing code actions",
|
||||
)?;
|
||||
let (providers, tasks) = this.update_in(cx, |this, window, cx| {
|
||||
let providers = this.code_action_providers.clone();
|
||||
let tasks = this
|
||||
@@ -6698,6 +6667,7 @@ impl Editor {
|
||||
cx.notify();
|
||||
})
|
||||
}));
|
||||
None
|
||||
}
|
||||
|
||||
fn start_inline_blame_timer(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
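The signature change above (returning `Option<()>` from `refresh_code_actions`) is what allows the body to bail out with `?` instead of the nested early-return blocks it had before. A tiny illustration of the idiom, with hypothetical names rather than the editor's real API:

fn refresh(selection: Option<(usize, usize)>, buffers: &[&str]) -> Option<()> {
    let (start, end) = selection?; // no selection: stop here
    let slice = buffers.get(start..=end)?; // invalid range: stop here
    println!("refreshing code actions for {} buffer(s)", slice.len());
    None // like the hunk above, the function simply ends by returning None
}

fn main() {
    refresh(Some((0, 1)), &["a.rs", "b.rs"]);
    refresh(None, &[]); // early-returns at the first `?`
}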
@@ -6947,24 +6917,19 @@ impl Editor {
|
||||
if self.selections.count() != 1 || self.selections.line_mode() {
|
||||
return None;
|
||||
}
|
||||
let selection = self.selections.newest_anchor();
|
||||
let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx);
|
||||
let selection_point_range = selection.start.to_point(&multi_buffer_snapshot)
|
||||
..selection.end.to_point(&multi_buffer_snapshot);
|
||||
// If the selection spans multiple rows OR it is empty
|
||||
if selection_point_range.start.row != selection_point_range.end.row
|
||||
|| selection_point_range.start.column == selection_point_range.end.column
|
||||
{
|
||||
let selection = self.selections.newest::<Point>(cx);
|
||||
if selection.is_empty() || selection.start.row != selection.end.row {
|
||||
return None;
|
||||
}
|
||||
|
||||
let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx);
|
||||
let selection_anchor_range = selection.range().to_anchors(&multi_buffer_snapshot);
|
||||
let query = multi_buffer_snapshot
|
||||
.text_for_range(selection.range())
|
||||
.text_for_range(selection_anchor_range.clone())
|
||||
.collect::<String>();
|
||||
if query.trim().is_empty() {
|
||||
return None;
|
||||
}
|
||||
Some((query, selection.range()))
|
||||
Some((query, selection_anchor_range))
|
||||
}
|
||||
|
||||
fn update_selection_occurrence_highlights(
|
||||
@@ -10495,33 +10460,29 @@ impl Editor {
|
||||
|
||||
let buffer = display_map.buffer_snapshot();
|
||||
let mut edit_start = ToOffset::to_offset(&Point::new(rows.start.0, 0), buffer);
|
||||
let (edit_end, target_row) = if buffer.max_point().row >= rows.end.0 {
|
||||
let edit_end = if buffer.max_point().row >= rows.end.0 {
|
||||
// If there's a line after the range, delete the \n from the end of the row range
|
||||
(
|
||||
ToOffset::to_offset(&Point::new(rows.end.0, 0), buffer),
|
||||
rows.end,
|
||||
)
|
||||
ToOffset::to_offset(&Point::new(rows.end.0, 0), buffer)
|
||||
} else {
|
||||
// If there isn't a line after the range, delete the \n from the line before the
|
||||
// start of the row range
|
||||
edit_start = edit_start.saturating_sub(1);
|
||||
(buffer.len(), rows.start.previous_row())
|
||||
buffer.len()
|
||||
};
|
||||
|
||||
let text_layout_details = self.text_layout_details(window);
|
||||
let x = display_map.x_for_display_point(
|
||||
let (cursor, goal) = movement::down_by_rows(
|
||||
&display_map,
|
||||
selection.head().to_display_point(&display_map),
|
||||
&text_layout_details,
|
||||
rows.len() as u32,
|
||||
selection.goal,
|
||||
false,
|
||||
&self.text_layout_details(window),
|
||||
);
|
||||
let row = Point::new(target_row.0, 0)
|
||||
.to_display_point(&display_map)
|
||||
.row();
|
||||
let column = display_map.display_column_for_x(row, x, &text_layout_details);
|
||||
|
||||
new_cursors.push((
|
||||
selection.id,
|
||||
buffer.anchor_after(DisplayPoint::new(row, column).to_point(&display_map)),
|
||||
SelectionGoal::None,
|
||||
buffer.anchor_after(cursor.to_point(&display_map)),
|
||||
goal,
|
||||
));
|
||||
edit_ranges.push(edit_start..edit_end);
|
||||
}
|
||||
@@ -11726,26 +11687,13 @@ impl Editor {
|
||||
rows.end.previous_row().0,
|
||||
buffer.line_len(rows.end.previous_row()),
|
||||
);
|
||||
|
||||
let mut text = buffer.text_for_range(start..end).collect::<String>();
|
||||
|
||||
let text = buffer
|
||||
.text_for_range(start..end)
|
||||
.chain(Some("\n"))
|
||||
.collect::<String>();
|
||||
let insert_location = if upwards {
|
||||
// When duplicating upward, we need to insert before the current line.
|
||||
// If we're on the last line and it doesn't end with a newline,
|
||||
// we need to add a newline before the duplicated content.
|
||||
let needs_leading_newline = rows.end.0 >= buffer.max_point().row
|
||||
&& buffer.max_point().column > 0
|
||||
&& !text.ends_with('\n');
|
||||
|
||||
if needs_leading_newline {
|
||||
text.insert(0, '\n');
|
||||
end
|
||||
} else {
|
||||
text.push('\n');
|
||||
Point::new(rows.end.0, 0)
|
||||
}
|
||||
Point::new(rows.end.0, 0)
|
||||
} else {
|
||||
text.push('\n');
|
||||
start
|
||||
};
|
||||
edits.push((insert_location..insert_location, text));
|
||||
@@ -12555,18 +12503,9 @@ impl Editor {
|
||||
let mut start = selection.start;
|
||||
let mut end = selection.end;
|
||||
let is_entire_line = selection.is_empty() || self.selections.line_mode();
|
||||
let mut add_trailing_newline = false;
|
||||
if is_entire_line {
|
||||
start = Point::new(start.row, 0);
|
||||
let next_line_start = Point::new(end.row + 1, 0);
|
||||
if next_line_start <= max_point {
|
||||
end = next_line_start;
|
||||
} else {
|
||||
// We're on the last line without a trailing newline.
|
||||
// Copy to the end of the line and add a newline afterwards.
|
||||
end = Point::new(end.row, buffer.line_len(MultiBufferRow(end.row)));
|
||||
add_trailing_newline = true;
|
||||
}
|
||||
end = cmp::min(max_point, Point::new(end.row + 1, 0));
|
||||
}
|
||||
|
||||
let mut trimmed_selections = Vec::new();
|
||||
@@ -12617,10 +12556,6 @@ impl Editor {
|
||||
text.push_str(chunk);
|
||||
len += chunk.len();
|
||||
}
|
||||
if add_trailing_newline {
|
||||
text.push('\n');
|
||||
len += 1;
|
||||
}
|
||||
clipboard_selections.push(ClipboardSelection {
|
||||
len,
|
||||
is_entire_line,
|
||||
@@ -14448,10 +14383,6 @@ impl Editor {
|
||||
let last_selection = selections.iter().max_by_key(|s| s.id).unwrap();
|
||||
let mut next_selected_range = None;
|
||||
|
||||
// Collect and sort selection ranges for efficient overlap checking
|
||||
let mut selection_ranges: Vec<_> = selections.iter().map(|s| s.range()).collect();
|
||||
selection_ranges.sort_by_key(|r| r.start);
|
||||
|
||||
let bytes_after_last_selection =
|
||||
buffer.bytes_in_range(last_selection.end..buffer.len());
|
||||
let bytes_before_first_selection = buffer.bytes_in_range(0..first_selection.start);
|
||||
@@ -14473,20 +14404,11 @@ impl Editor {
                || (!buffer.is_inside_word(offset_range.start, None)
                    && !buffer.is_inside_word(offset_range.end, None))
                {
                    // Use binary search to check for overlap (O(log n))
                    let overlaps = selection_ranges
                        .binary_search_by(|range| {
                            if range.end <= offset_range.start {
                                std::cmp::Ordering::Less
                            } else if range.start >= offset_range.end {
                                std::cmp::Ordering::Greater
                            } else {
                                std::cmp::Ordering::Equal
                            }
                        })
                        .is_ok();

                    if !overlaps {
                    // TODO: This is n^2, because we might check all the selections
                    if !selections
                        .iter()
                        .any(|selection| selection.range().overlaps(&offset_range))
                    {
                        next_selected_range = Some(offset_range);
                        break;
                    }
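A standalone sketch of the sorted-ranges overlap check introduced above (illustrative, with made-up data): because the candidate ranges are sorted and pairwise disjoint, `binary_search_by` can treat "overlaps the probe" as `Equal` and answer in O(log n) instead of scanning every selection.

use std::cmp::Ordering;
use std::ops::Range;

fn overlaps_any(sorted: &[Range<usize>], probe: &Range<usize>) -> bool {
    sorted
        .binary_search_by(|range| {
            if range.end <= probe.start {
                Ordering::Less
            } else if range.start >= probe.end {
                Ordering::Greater
            } else {
                Ordering::Equal // the ranges intersect
            }
        })
        .is_ok()
}

fn main() {
    let selections = vec![0..4, 10..14, 20..24]; // sorted, pairwise disjoint
    assert!(overlaps_any(&selections, &(12..16)));
    assert!(!overlaps_any(&selections, &(5..9)));
}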
@@ -18248,87 +18170,6 @@ impl Editor {
|
||||
self.fold_creases(to_fold, true, window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_1(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel1,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(1), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_2(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel2,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(2), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_3(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel3,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(3), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_4(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel4,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(4), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_5(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel5,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(5), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_6(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel6,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(6), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_7(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel7,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(7), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_8(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel8,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(8), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_9(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel9,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(9), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_all(&mut self, _: &actions::FoldAll, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.buffer.read(cx).is_singleton() {
|
||||
let mut fold_ranges = Vec::new();
|
||||
@@ -20857,7 +20698,7 @@ impl Editor {
|
||||
self.refresh_code_actions(window, cx);
|
||||
self.refresh_selected_text_highlights(true, window, cx);
|
||||
self.refresh_single_line_folds(window, cx);
|
||||
refresh_matching_bracket_highlights(self, cx);
|
||||
refresh_matching_bracket_highlights(self, window, cx);
|
||||
if self.has_active_edit_prediction() {
|
||||
self.update_visible_edit_prediction(window, cx);
|
||||
}
|
||||
@@ -24687,7 +24528,7 @@ impl Render for MissingEditPredictionKeybindingTooltip {
|
||||
.items_end()
|
||||
.w_full()
|
||||
.child(Button::new("open-keymap", "Assign Keybinding").size(ButtonSize::Compact).on_click(|_ev, window, cx| {
|
||||
window.dispatch_action(zed_actions::OpenKeymapFile.boxed_clone(), cx)
|
||||
window.dispatch_action(zed_actions::OpenKeymap.boxed_clone(), cx)
|
||||
}))
|
||||
.child(Button::new("see-docs", "See Docs").size(ButtonSize::Compact).on_click(|_ev, _window, cx| {
|
||||
cx.open_url("https://zed.dev/docs/completions#edit-predictions-missing-keybinding");
|
||||
|
||||
@@ -267,7 +267,7 @@ impl Settings for EditorSettings {
|
||||
delay: drag_and_drop_selection.delay.unwrap(),
|
||||
},
|
||||
lsp_document_colors: editor.lsp_document_colors.unwrap(),
|
||||
minimum_contrast_for_highlights: editor.minimum_contrast_for_highlights.unwrap().0,
|
||||
minimum_contrast_for_highlights: editor.minimum_contrast_for_highlights.unwrap(),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -619,93 +619,6 @@ fn test_movement_actions_with_pending_selection(cx: &mut TestAppContext) {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_extending_selection(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let editor = cx.add_window(|window, cx| {
|
||||
let buffer = MultiBuffer::build_simple("aaa bbb ccc ddd eee", cx);
|
||||
build_editor(buffer, window, cx)
|
||||
});
|
||||
|
||||
_ = editor.update(cx, |editor, window, cx| {
|
||||
editor.begin_selection(DisplayPoint::new(DisplayRow(0), 5), false, 1, window, cx);
|
||||
editor.end_selection(window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 5)]
|
||||
);
|
||||
|
||||
editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx);
|
||||
editor.end_selection(window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 10)]
|
||||
);
|
||||
|
||||
editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx);
|
||||
editor.end_selection(window, cx);
|
||||
editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 2, window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 11)]
|
||||
);
|
||||
|
||||
editor.update_selection(
|
||||
DisplayPoint::new(DisplayRow(0), 1),
|
||||
0,
|
||||
gpui::Point::<f32>::default(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
editor.end_selection(window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 0)]
|
||||
);
|
||||
|
||||
editor.begin_selection(DisplayPoint::new(DisplayRow(0), 5), true, 1, window, cx);
|
||||
editor.end_selection(window, cx);
|
||||
editor.begin_selection(DisplayPoint::new(DisplayRow(0), 5), true, 2, window, cx);
|
||||
editor.end_selection(window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 7)]
|
||||
);
|
||||
|
||||
editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 11)]
|
||||
);
|
||||
|
||||
editor.update_selection(
|
||||
DisplayPoint::new(DisplayRow(0), 6),
|
||||
0,
|
||||
gpui::Point::<f32>::default(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 7)]
|
||||
);
|
||||
|
||||
editor.update_selection(
|
||||
DisplayPoint::new(DisplayRow(0), 1),
|
||||
0,
|
||||
gpui::Point::<f32>::default(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
editor.end_selection(window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 7)..DisplayPoint::new(DisplayRow(0), 0)]
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_clone(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -4387,8 +4300,8 @@ fn test_delete_line(cx: &mut TestAppContext) {
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
vec![
|
||||
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0),
|
||||
DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1),
|
||||
DisplayPoint::new(DisplayRow(0), 3)..DisplayPoint::new(DisplayRow(0), 3),
|
||||
]
|
||||
);
|
||||
});
|
||||
@@ -4410,24 +4323,6 @@ fn test_delete_line(cx: &mut TestAppContext) {
|
||||
vec![DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1)]
|
||||
);
|
||||
});
|
||||
|
||||
let editor = cx.add_window(|window, cx| {
|
||||
let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n\njkl\nmno", cx);
|
||||
build_editor(buffer, window, cx)
|
||||
});
|
||||
_ = editor.update(cx, |editor, window, cx| {
|
||||
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
|
||||
s.select_display_ranges([
|
||||
DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(2), 1)
|
||||
])
|
||||
});
|
||||
editor.delete_line(&DeleteLine, window, cx);
|
||||
assert_eq!(editor.display_text(cx), "\njkl\nmno");
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)]
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -12521,6 +12416,11 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
|
||||
.join("\n"),
|
||||
);
|
||||
|
||||
// Submit a format request.
|
||||
let format = cx
|
||||
.update_editor(|editor, window, cx| editor.format(&Format, window, cx))
|
||||
.unwrap();
|
||||
|
||||
// Record which buffer changes have been sent to the language server
|
||||
let buffer_changes = Arc::new(Mutex::new(Vec::new()));
|
||||
cx.lsp
|
||||
@@ -12541,29 +12441,28 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
|
||||
.set_request_handler::<lsp::request::Formatting, _, _>({
|
||||
let buffer_changes = buffer_changes.clone();
|
||||
move |_, _| {
|
||||
let buffer_changes = buffer_changes.clone();
|
||||
// When formatting is requested, trailing whitespace has already been stripped,
|
||||
// and the trailing newline has already been added.
|
||||
assert_eq!(
|
||||
&buffer_changes.lock()[1..],
|
||||
&[
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 4)),
|
||||
"".into()
|
||||
),
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(2, 5), lsp::Position::new(2, 6)),
|
||||
"".into()
|
||||
),
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(3, 4), lsp::Position::new(3, 4)),
|
||||
"\n".into()
|
||||
),
|
||||
]
|
||||
);
|
||||
|
||||
// Insert blank lines between each line of the buffer.
|
||||
async move {
|
||||
// When formatting is requested, trailing whitespace has already been stripped,
|
||||
// and the trailing newline has already been added.
|
||||
assert_eq!(
|
||||
&buffer_changes.lock()[1..],
|
||||
&[
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 4)),
|
||||
"".into()
|
||||
),
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(2, 5), lsp::Position::new(2, 6)),
|
||||
"".into()
|
||||
),
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(3, 4), lsp::Position::new(3, 4)),
|
||||
"\n".into()
|
||||
),
|
||||
]
|
||||
);
|
||||
|
||||
Ok(Some(vec![
|
||||
lsp::TextEdit {
|
||||
range: lsp::Range::new(
|
||||
@@ -12584,17 +12483,10 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
|
||||
}
|
||||
});
|
||||
|
||||
// Submit a format request.
|
||||
let format = cx
|
||||
.update_editor(|editor, window, cx| editor.format(&Format, window, cx))
|
||||
.unwrap();
|
||||
|
||||
cx.run_until_parked();
|
||||
// After formatting the buffer, the trailing whitespace is stripped,
|
||||
// a newline is appended, and the edits provided by the language server
|
||||
// have been applied.
|
||||
format.await.unwrap();
|
||||
|
||||
cx.assert_editor_state(
|
||||
&[
|
||||
"one", //
|
||||
@@ -16623,7 +16515,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) {
|
||||
leader.update(cx, |leader, cx| {
|
||||
leader.buffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::with_sort_prefix(1, rel_path("b.txt").into_arc()),
|
||||
PathKey::namespaced(1, rel_path("b.txt").into_arc()),
|
||||
buffer_1.clone(),
|
||||
vec![
|
||||
Point::row_range(0..3),
|
||||
@@ -16634,7 +16526,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) {
|
||||
cx,
|
||||
);
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::with_sort_prefix(1, rel_path("a.txt").into_arc()),
|
||||
PathKey::namespaced(1, rel_path("a.txt").into_arc()),
|
||||
buffer_2.clone(),
|
||||
vec![Point::row_range(0..6), Point::row_range(8..12)],
|
||||
0,
|
||||
@@ -21137,7 +21029,7 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) {
|
||||
for buffer in &buffers {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::with_sort_prefix(0, buffer.read(cx).file().unwrap().path().clone()),
|
||||
PathKey::namespaced(0, buffer.read(cx).file().unwrap().path().clone()),
|
||||
buffer.clone(),
|
||||
vec![text::Anchor::MIN.to_point(&snapshot)..text::Anchor::MAX.to_point(&snapshot)],
|
||||
2,
|
||||
@@ -25811,7 +25703,7 @@ async fn test_document_colors(cx: &mut TestAppContext) {
|
||||
.set_request_handler::<lsp::request::DocumentColor, _, _>(move |_, _| async move {
|
||||
panic!("Should not be called");
|
||||
});
|
||||
cx.executor().advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT);
|
||||
cx.executor().advance_clock(Duration::from_millis(100));
|
||||
color_request_handle.next().await.unwrap();
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
@@ -25895,9 +25787,9 @@ async fn test_document_colors(cx: &mut TestAppContext) {
|
||||
color_request_handle.next().await.unwrap();
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
2,
|
||||
3,
|
||||
requests_made.load(atomic::Ordering::Acquire),
|
||||
"Should query for colors once per save (deduplicated) and once per formatting after save"
|
||||
"Should query for colors once per save and once per formatting after save"
|
||||
);
|
||||
|
||||
drop(editor);
|
||||
@@ -25918,7 +25810,7 @@ async fn test_document_colors(cx: &mut TestAppContext) {
|
||||
.unwrap();
|
||||
close.await.unwrap();
|
||||
assert_eq!(
|
||||
2,
|
||||
3,
|
||||
requests_made.load(atomic::Ordering::Acquire),
|
||||
"After saving and closing all editors, no extra requests should be made"
|
||||
);
|
||||
@@ -25938,7 +25830,7 @@ async fn test_document_colors(cx: &mut TestAppContext) {
|
||||
})
|
||||
})
|
||||
.unwrap();
|
||||
cx.executor().advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT);
|
||||
cx.executor().advance_clock(Duration::from_millis(100));
|
||||
cx.run_until_parked();
|
||||
let editor = workspace
|
||||
.update(cx, |workspace, _, cx| {
|
||||
@@ -25949,9 +25841,9 @@ async fn test_document_colors(cx: &mut TestAppContext) {
|
||||
.expect("Should be an editor")
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
color_request_handle.next().await.unwrap();
|
||||
assert_eq!(
|
||||
2,
|
||||
3,
|
||||
requests_made.load(atomic::Ordering::Acquire),
|
||||
"Cache should be reused on buffer close and reopen"
|
||||
);
|
||||
@@ -25992,11 +25884,10 @@ async fn test_document_colors(cx: &mut TestAppContext) {
|
||||
});
|
||||
save.await.unwrap();
|
||||
|
||||
cx.executor().advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT);
|
||||
empty_color_request_handle.next().await.unwrap();
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
3,
|
||||
4,
|
||||
requests_made.load(atomic::Ordering::Acquire),
|
||||
"Should query for colors once per save only, as formatting was not requested"
|
||||
);
|
||||
@@ -26584,64 +26475,3 @@ fn extract_color_inlays(editor: &Editor, cx: &App) -> Vec<Rgba> {
|
||||
.map(Rgba::from)
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_duplicate_line_up_on_last_line_without_newline(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let editor = cx.add_window(|window, cx| {
|
||||
let buffer = MultiBuffer::build_simple("line1\nline2", cx);
|
||||
build_editor(buffer, window, cx)
|
||||
});
|
||||
|
||||
editor
|
||||
.update(cx, |editor, window, cx| {
|
||||
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
|
||||
s.select_display_ranges([
|
||||
DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0)
|
||||
])
|
||||
});
|
||||
|
||||
editor.duplicate_line_up(&DuplicateLineUp, window, cx);
|
||||
|
||||
assert_eq!(
|
||||
editor.display_text(cx),
|
||||
"line1\nline2\nline2",
|
||||
"Duplicating last line upward should create duplicate above, not on same line"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
vec![DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0)],
|
||||
"Selection should remain on the original line"
|
||||
);
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_copy_line_without_trailing_newline(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
cx.set_state("line1\nline2ˇ");
|
||||
|
||||
cx.update_editor(|e, window, cx| e.copy(&Copy, window, cx));
|
||||
|
||||
let clipboard_text = cx
|
||||
.read_from_clipboard()
|
||||
.and_then(|item| item.text().as_deref().map(str::to_string));
|
||||
|
||||
assert_eq!(
|
||||
clipboard_text,
|
||||
Some("line2\n".to_string()),
|
||||
"Copying a line without trailing newline should include a newline"
|
||||
);
|
||||
|
||||
cx.set_state("line1\nˇ");
|
||||
|
||||
cx.update_editor(|e, window, cx| e.paste(&Paste, window, cx));
|
||||
|
||||
cx.assert_editor_state("line1\nline2\nˇ");
|
||||
}
|
||||
|
||||
@@ -432,15 +432,6 @@ impl EditorElement {
|
||||
register_action(editor, window, Editor::open_selected_filename);
|
||||
register_action(editor, window, Editor::fold);
|
||||
register_action(editor, window, Editor::fold_at_level);
|
||||
register_action(editor, window, Editor::fold_at_level_1);
|
||||
register_action(editor, window, Editor::fold_at_level_2);
|
||||
register_action(editor, window, Editor::fold_at_level_3);
|
||||
register_action(editor, window, Editor::fold_at_level_4);
|
||||
register_action(editor, window, Editor::fold_at_level_5);
|
||||
register_action(editor, window, Editor::fold_at_level_6);
|
||||
register_action(editor, window, Editor::fold_at_level_7);
|
||||
register_action(editor, window, Editor::fold_at_level_8);
|
||||
register_action(editor, window, Editor::fold_at_level_9);
|
||||
register_action(editor, window, Editor::fold_all);
|
||||
register_action(editor, window, Editor::fold_function_bodies);
|
||||
register_action(editor, window, Editor::fold_recursive);
|
||||
@@ -681,7 +672,6 @@ impl EditorElement {
|
||||
.drag_and_drop_selection
|
||||
.enabled
|
||||
&& click_count == 1
|
||||
&& !modifiers.shift
|
||||
{
|
||||
let newest_anchor = editor.selections.newest_anchor();
|
||||
let snapshot = editor.snapshot(window, cx);
|
||||
@@ -740,35 +730,6 @@ impl EditorElement {
|
||||
}
|
||||
}
|
||||
|
||||
if !is_singleton {
|
||||
let display_row = (ScrollPixelOffset::from(
|
||||
(event.position - gutter_hitbox.bounds.origin).y / position_map.line_height,
|
||||
) + position_map.scroll_position.y) as u32;
|
||||
let multi_buffer_row = position_map
|
||||
.snapshot
|
||||
.display_point_to_point(DisplayPoint::new(DisplayRow(display_row), 0), Bias::Right)
|
||||
.row;
|
||||
if line_numbers
|
||||
.get(&MultiBufferRow(multi_buffer_row))
|
||||
.and_then(|line_number| line_number.hitbox.as_ref())
|
||||
.is_some_and(|hitbox| hitbox.contains(&event.position))
|
||||
{
|
||||
let line_offset_from_top = display_row - position_map.scroll_position.y as u32;
|
||||
|
||||
editor.open_excerpts_common(
|
||||
Some(JumpData::MultiBufferRow {
|
||||
row: MultiBufferRow(multi_buffer_row),
|
||||
line_offset_from_top,
|
||||
}),
|
||||
modifiers.alt,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
cx.stop_propagation();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
let position = point_for_position.previous_valid;
|
||||
if let Some(mode) = Editor::columnar_selection_mode(&modifiers, cx) {
|
||||
editor.select(
|
||||
@@ -806,6 +767,34 @@ impl EditorElement {
|
||||
);
|
||||
}
|
||||
cx.stop_propagation();
|
||||
|
||||
if !is_singleton {
|
||||
let display_row = (ScrollPixelOffset::from(
|
||||
(event.position - gutter_hitbox.bounds.origin).y / position_map.line_height,
|
||||
) + position_map.scroll_position.y) as u32;
|
||||
let multi_buffer_row = position_map
|
||||
.snapshot
|
||||
.display_point_to_point(DisplayPoint::new(DisplayRow(display_row), 0), Bias::Right)
|
||||
.row;
|
||||
if line_numbers
|
||||
.get(&MultiBufferRow(multi_buffer_row))
|
||||
.and_then(|line_number| line_number.hitbox.as_ref())
|
||||
.is_some_and(|hitbox| hitbox.contains(&event.position))
|
||||
{
|
||||
let line_offset_from_top = display_row - position_map.scroll_position.y as u32;
|
||||
|
||||
editor.open_excerpts_common(
|
||||
Some(JumpData::MultiBufferRow {
|
||||
row: MultiBufferRow(multi_buffer_row),
|
||||
line_offset_from_top,
|
||||
}),
|
||||
modifiers.alt,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
cx.stop_propagation();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn mouse_right_down(
|
||||
|
||||
@@ -1,46 +1,47 @@
|
||||
use crate::{Editor, RangeToAnchorExt};
|
||||
use gpui::{Context, HighlightStyle};
|
||||
use gpui::{Context, HighlightStyle, Window};
|
||||
use language::CursorShape;
|
||||
use multi_buffer::ToOffset;
|
||||
use theme::ActiveTheme;
|
||||
|
||||
enum MatchingBracketHighlight {}
|
||||
|
||||
pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut Context<Editor>) {
|
||||
pub fn refresh_matching_bracket_highlights(
|
||||
editor: &mut Editor,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
editor.clear_highlights::<MatchingBracketHighlight>(cx);
|
||||
|
||||
let buffer_snapshot = editor.buffer.read(cx).snapshot(cx);
|
||||
let newest_selection = editor
|
||||
.selections
|
||||
.newest_anchor()
|
||||
.map(|anchor| anchor.to_offset(&buffer_snapshot));
|
||||
let newest_selection = editor.selections.newest::<usize>(cx);
|
||||
// Don't highlight brackets if the selection isn't empty
|
||||
if !newest_selection.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let snapshot = editor.snapshot(window, cx);
|
||||
let head = newest_selection.head();
|
||||
if head > buffer_snapshot.len() {
|
||||
if head > snapshot.buffer_snapshot().len() {
|
||||
log::error!("bug: cursor offset is out of range while refreshing bracket highlights");
|
||||
return;
|
||||
}
|
||||
|
||||
let mut tail = head;
|
||||
if (editor.cursor_shape == CursorShape::Block || editor.cursor_shape == CursorShape::Hollow)
|
||||
&& head < buffer_snapshot.len()
|
||||
&& head < snapshot.buffer_snapshot().len()
|
||||
{
|
||||
if let Some(tail_ch) = buffer_snapshot.chars_at(tail).next() {
|
||||
if let Some(tail_ch) = snapshot.buffer_snapshot().chars_at(tail).next() {
|
||||
tail += tail_ch.len_utf8();
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((opening_range, closing_range)) =
|
||||
buffer_snapshot.innermost_enclosing_bracket_ranges(head..tail, None)
|
||||
if let Some((opening_range, closing_range)) = snapshot
|
||||
.buffer_snapshot()
|
||||
.innermost_enclosing_bracket_ranges(head..tail, None)
|
||||
{
|
||||
editor.highlight_text::<MatchingBracketHighlight>(
|
||||
vec![
|
||||
opening_range.to_anchors(&buffer_snapshot),
|
||||
closing_range.to_anchors(&buffer_snapshot),
|
||||
opening_range.to_anchors(&snapshot.buffer_snapshot()),
|
||||
closing_range.to_anchors(&snapshot.buffer_snapshot()),
|
||||
],
|
||||
HighlightStyle {
|
||||
background_color: Some(
|
||||
|
||||
@@ -307,6 +307,7 @@ pub fn update_inlay_link_and_hover_points(
|
||||
buffer_snapshot.anchor_after(point_for_position.next_valid.to_point(snapshot));
|
||||
if let Some(hovered_hint) = editor
|
||||
.visible_inlay_hints(cx)
|
||||
.into_iter()
|
||||
.skip_while(|hint| {
|
||||
hint.position
|
||||
.cmp(&previous_valid_anchor, &buffer_snapshot)
|
||||
|
||||
@@ -1013,7 +1013,7 @@ fn fetch_and_update_hints(
|
||||
.cloned()
|
||||
})?;
|
||||
|
||||
let visible_hints = editor.update(cx, |editor, cx| editor.visible_inlay_hints(cx).cloned().collect::<Vec<_>>())?;
|
||||
let visible_hints = editor.update(cx, |editor, cx| editor.visible_inlay_hints(cx))?;
|
||||
let new_hints = match inlay_hints_fetch_task {
|
||||
Some(fetch_task) => {
|
||||
log::debug!(
|
||||
@@ -1495,7 +1495,7 @@ pub mod tests {
|
||||
.into_response()
|
||||
.expect("work done progress create request failed");
|
||||
cx.executor().run_until_parked();
|
||||
fake_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
fake_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
token: lsp::ProgressToken::String(progress_token.to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Begin(
|
||||
lsp::WorkDoneProgressBegin::default(),
|
||||
@@ -1515,7 +1515,7 @@ pub mod tests {
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
fake_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
fake_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
token: lsp::ProgressToken::String(progress_token.to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::End(
|
||||
lsp::WorkDoneProgressEnd::default(),
|
||||
@@ -3570,6 +3570,7 @@ pub mod tests {
|
||||
pub fn visible_hint_labels(editor: &Editor, cx: &Context<Editor>) -> Vec<String> {
|
||||
editor
|
||||
.visible_inlay_hints(cx)
|
||||
.into_iter()
|
||||
.map(|hint| hint.text().to_string())
|
||||
.collect()
|
||||
}
|
||||
|
||||
@@ -13,8 +13,8 @@ use ui::{App, Context, Window};
|
||||
use util::post_inc;
|
||||
|
||||
use crate::{
|
||||
DisplayPoint, Editor, EditorSettings, EditorSnapshot, FETCH_COLORS_DEBOUNCE_TIMEOUT, InlayId,
|
||||
InlaySplice, RangeToAnchorExt, display_map::Inlay, editor_settings::DocumentColorsRenderMode,
|
||||
DisplayPoint, Editor, EditorSettings, EditorSnapshot, InlayId, InlaySplice, RangeToAnchorExt,
|
||||
display_map::Inlay, editor_settings::DocumentColorsRenderMode,
|
||||
};
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -193,12 +193,7 @@ impl Editor {
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
self.refresh_colors_task = cx.spawn(async move |editor, cx| {
|
||||
cx.background_executor()
|
||||
.timer(FETCH_COLORS_DEBOUNCE_TIMEOUT)
|
||||
.await;
|
||||
|
||||
cx.spawn(async move |editor, cx| {
|
||||
let all_colors = join_all(all_colors_task).await;
|
||||
if all_colors.is_empty() {
|
||||
return;
|
||||
@@ -425,6 +420,7 @@ impl Editor {
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
|
||||
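The hunk above removes the `FETCH_COLORS_DEBOUNCE_TIMEOUT` wait from this task. For readers unfamiliar with the pattern being removed, a rough gpui-free sketch of timer-based debouncing (illustrative only, not the editor's implementation): restart a short quiet period on every request and only run the work for the request that survives it.

use std::sync::mpsc;
use std::thread;
use std::time::Duration;

const DEBOUNCE: Duration = Duration::from_millis(150);

fn main() {
    let (tx, rx) = mpsc::channel::<&str>();
    let worker = thread::spawn(move || {
        let mut pending = None;
        loop {
            match rx.recv_timeout(DEBOUNCE) {
                // A newer request within the window supersedes the previous one.
                Ok(request) => pending = Some(request),
                // Quiet for a full window: run the latest request, if any.
                Err(mpsc::RecvTimeoutError::Timeout) => {
                    if let Some(request) = pending.take() {
                        println!("fetching document colors for {request}");
                    }
                }
                Err(mpsc::RecvTimeoutError::Disconnected) => break,
            }
        }
    });
    tx.send("main.rs").unwrap();
    tx.send("main.rs (edited again)").unwrap();
    thread::sleep(Duration::from_millis(400));
    drop(tx);
    worker.join().unwrap();
}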
@@ -35,8 +35,6 @@ pub struct SelectionsCollection {
|
||||
disjoint: Arc<[Selection<Anchor>]>,
|
||||
/// A pending selection, such as when the mouse is being dragged
|
||||
pending: Option<PendingSelection>,
|
||||
select_mode: SelectMode,
|
||||
is_extending: bool,
|
||||
}
|
||||
|
||||
impl SelectionsCollection {
|
||||
@@ -57,8 +55,6 @@ impl SelectionsCollection {
|
||||
},
|
||||
mode: SelectMode::Character,
|
||||
}),
|
||||
select_mode: SelectMode::Character,
|
||||
is_extending: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -188,27 +184,6 @@ impl SelectionsCollection {
selections
}

/// Returns all of the selections, adjusted to take into account the selection line_mode. Uses a provided snapshot to resolve selections.
pub fn all_adjusted_with_snapshot(
&self,
snapshot: &MultiBufferSnapshot,
) -> Vec<Selection<Point>> {
let mut selections = self
.disjoint
.iter()
.chain(self.pending_anchor())
.map(|anchor| anchor.map(|anchor| anchor.to_point(&snapshot)))
.collect::<Vec<_>>();
if self.line_mode {
for selection in &mut selections {
let new_range = snapshot.expand_to_line(selection.range());
selection.start = new_range.start;
selection.end = new_range.end;
}
}
selections
}

/// Returns the newest selection, adjusted to take into account the selection line_mode
|
||||
pub fn newest_adjusted(&self, cx: &mut App) -> Selection<Point> {
|
||||
let mut selection = self.newest::<Point>(cx);
|
||||
@@ -460,22 +435,6 @@ impl SelectionsCollection {
|
||||
pub fn set_line_mode(&mut self, line_mode: bool) {
|
||||
self.line_mode = line_mode;
|
||||
}
|
||||
|
||||
pub fn select_mode(&self) -> &SelectMode {
|
||||
&self.select_mode
|
||||
}
|
||||
|
||||
pub fn set_select_mode(&mut self, select_mode: SelectMode) {
|
||||
self.select_mode = select_mode;
|
||||
}
|
||||
|
||||
pub fn is_extending(&self) -> bool {
|
||||
self.is_extending
|
||||
}
|
||||
|
||||
pub fn set_is_extending(&mut self, is_extending: bool) {
|
||||
self.is_extending = is_extending;
|
||||
}
|
||||
}
|
||||
|
||||
pub struct MutableSelectionsCollection<'a> {
|
||||
|
||||
@@ -262,77 +262,6 @@ impl EditorLspTestContext {
|
||||
Self::new(language, capabilities, cx).await
|
||||
}
|
||||
|
||||
pub async fn new_tsx(
|
||||
capabilities: lsp::ServerCapabilities,
|
||||
cx: &mut gpui::TestAppContext,
|
||||
) -> EditorLspTestContext {
|
||||
let mut word_characters: HashSet<char> = Default::default();
|
||||
word_characters.insert('$');
|
||||
word_characters.insert('#');
|
||||
let language = Language::new(
|
||||
LanguageConfig {
|
||||
name: "TSX".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["tsx".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
brackets: language::BracketPairConfig {
|
||||
pairs: vec![language::BracketPair {
|
||||
start: "{".to_string(),
|
||||
end: "}".to_string(),
|
||||
close: true,
|
||||
surround: true,
|
||||
newline: true,
|
||||
}],
|
||||
disabled_scopes_by_bracket_ix: Default::default(),
|
||||
},
|
||||
word_characters,
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
|
||||
)
|
||||
.with_queries(LanguageQueries {
|
||||
brackets: Some(Cow::from(indoc! {r#"
|
||||
("(" @open ")" @close)
|
||||
("[" @open "]" @close)
|
||||
("{" @open "}" @close)
|
||||
("<" @open ">" @close)
|
||||
("<" @open "/>" @close)
|
||||
("</" @open ">" @close)
|
||||
("\"" @open "\"" @close)
|
||||
("'" @open "'" @close)
|
||||
("`" @open "`" @close)
|
||||
((jsx_element (jsx_opening_element) @open (jsx_closing_element) @close) (#set! newline.only))"#})),
|
||||
indents: Some(Cow::from(indoc! {r#"
|
||||
[
|
||||
(call_expression)
|
||||
(assignment_expression)
|
||||
(member_expression)
|
||||
(lexical_declaration)
|
||||
(variable_declaration)
|
||||
(assignment_expression)
|
||||
(if_statement)
|
||||
(for_statement)
|
||||
] @indent
|
||||
|
||||
(_ "[" "]" @end) @indent
|
||||
(_ "<" ">" @end) @indent
|
||||
(_ "{" "}" @end) @indent
|
||||
(_ "(" ")" @end) @indent
|
||||
|
||||
(jsx_opening_element ">" @end) @indent
|
||||
|
||||
(jsx_element
|
||||
(jsx_opening_element) @start
|
||||
(jsx_closing_element)? @end) @indent
|
||||
"#})),
|
||||
..Default::default()
|
||||
})
|
||||
.expect("Could not parse queries");
|
||||
|
||||
Self::new(language, capabilities, cx).await
|
||||
}
|
||||
|
||||
pub async fn new_html(cx: &mut gpui::TestAppContext) -> Self {
|
||||
let language = Language::new(
|
||||
LanguageConfig {
|
||||
@@ -440,7 +369,7 @@ impl EditorLspTestContext {
|
||||
}
|
||||
|
||||
pub fn notify<T: notification::Notification>(&self, params: T::Params) {
|
||||
self.lsp.notify::<T>(params);
|
||||
self.lsp.notify::<T>(¶ms);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::{
|
||||
AnchorRangeExt, DisplayPoint, Editor, ExcerptId, MultiBuffer, MultiBufferSnapshot, RowExt,
|
||||
AnchorRangeExt, DisplayPoint, Editor, MultiBuffer, RowExt,
|
||||
display_map::{HighlightKey, ToDisplayPoint},
|
||||
};
|
||||
use buffer_diff::DiffHunkStatusKind;
|
||||
@@ -24,7 +24,6 @@ use std::{
|
||||
atomic::{AtomicUsize, Ordering},
|
||||
},
|
||||
};
|
||||
use text::Selection;
|
||||
use util::{
|
||||
assert_set_eq,
|
||||
test::{generate_marked_text, marked_text_ranges},
|
||||
@@ -389,23 +388,6 @@ impl EditorTestContext {
|
||||
|
||||
#[track_caller]
|
||||
pub fn assert_excerpts_with_selections(&mut self, marked_text: &str) {
|
||||
let actual_text = self.to_format_multibuffer_as_marked_text();
|
||||
let fmt_additional_notes = || {
|
||||
struct Format<'a, T: std::fmt::Display>(&'a str, &'a T);
|
||||
|
||||
impl<T: std::fmt::Display> std::fmt::Display for Format<'_, T> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"\n\n----- EXPECTED: -----\n\n{}\n\n----- ACTUAL: -----\n\n{}\n\n",
|
||||
self.0, self.1
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
Format(marked_text, &actual_text)
|
||||
};
|
||||
|
||||
let expected_excerpts = marked_text
|
||||
.strip_prefix("[EXCERPT]\n")
|
||||
.unwrap()
|
||||
@@ -426,10 +408,9 @@ impl EditorTestContext {
|
||||
|
||||
assert!(
|
||||
excerpts.len() == expected_excerpts.len(),
|
||||
"should have {} excerpts, got {}{}",
|
||||
"should have {} excerpts, got {}",
|
||||
expected_excerpts.len(),
|
||||
excerpts.len(),
|
||||
fmt_additional_notes(),
|
||||
excerpts.len()
|
||||
);
|
||||
|
||||
for (ix, (excerpt_id, snapshot, range)) in excerpts.into_iter().enumerate() {
|
||||
@@ -443,25 +424,18 @@ impl EditorTestContext {
|
||||
if !expected_selections.is_empty() {
|
||||
assert!(
|
||||
is_selected,
|
||||
"excerpt {ix} should contain selections. got {:?}{}",
|
||||
"excerpt {ix} should be selected. got {:?}",
|
||||
self.editor_state(),
|
||||
fmt_additional_notes(),
|
||||
);
|
||||
} else {
|
||||
assert!(
|
||||
!is_selected,
|
||||
"excerpt {ix} should not contain selections, got: {selections:?}{}",
|
||||
fmt_additional_notes(),
|
||||
"excerpt {ix} should not be selected, got: {selections:?}",
|
||||
);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
assert!(
|
||||
!is_folded,
|
||||
"excerpt {} should not be folded{}",
|
||||
ix,
|
||||
fmt_additional_notes()
|
||||
);
|
||||
assert!(!is_folded, "excerpt {} should not be folded", ix);
|
||||
assert_eq!(
|
||||
multibuffer_snapshot
|
||||
.text_for_range(Anchor::range_in_buffer(
|
||||
@@ -470,9 +444,7 @@ impl EditorTestContext {
|
||||
range.context.clone()
|
||||
))
|
||||
.collect::<String>(),
|
||||
expected_text,
|
||||
"{}",
|
||||
fmt_additional_notes(),
|
||||
expected_text
|
||||
);
|
||||
|
||||
let selections = selections
|
||||
@@ -488,38 +460,13 @@ impl EditorTestContext {
|
||||
.collect::<Vec<_>>();
|
||||
// todo: selections that cross excerpt boundaries..
|
||||
assert_eq!(
|
||||
selections,
|
||||
expected_selections,
|
||||
"excerpt {} has incorrect selections{}",
|
||||
selections, expected_selections,
|
||||
"excerpt {} has incorrect selections",
|
||||
ix,
|
||||
fmt_additional_notes()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn to_format_multibuffer_as_marked_text(&mut self) -> FormatMultiBufferAsMarkedText {
|
||||
let (multibuffer_snapshot, selections, excerpts) = self.update_editor(|editor, _, cx| {
|
||||
let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx);
|
||||
|
||||
let selections = editor.selections.disjoint_anchors_arc().to_vec();
|
||||
let excerpts = multibuffer_snapshot
|
||||
.excerpts()
|
||||
.map(|(e_id, snapshot, range)| {
|
||||
let is_folded = editor.is_buffer_folded(snapshot.remote_id(), cx);
|
||||
(e_id, snapshot.clone(), range, is_folded)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
(multibuffer_snapshot, selections, excerpts)
|
||||
});
|
||||
|
||||
FormatMultiBufferAsMarkedText {
|
||||
multibuffer_snapshot,
|
||||
selections,
|
||||
excerpts,
|
||||
}
|
||||
}
|
||||
|
||||
/// Make an assertion about the editor's text and the ranges and directions
|
||||
/// of its selections using a string containing embedded range markers.
|
||||
///
|
||||
@@ -624,63 +571,6 @@ impl EditorTestContext {
|
||||
}
|
||||
}
|
||||
|
||||
struct FormatMultiBufferAsMarkedText {
|
||||
multibuffer_snapshot: MultiBufferSnapshot,
|
||||
selections: Vec<Selection<Anchor>>,
|
||||
excerpts: Vec<(ExcerptId, BufferSnapshot, ExcerptRange<text::Anchor>, bool)>,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for FormatMultiBufferAsMarkedText {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let Self {
|
||||
multibuffer_snapshot,
|
||||
selections,
|
||||
excerpts,
|
||||
} = self;
|
||||
|
||||
for (excerpt_id, snapshot, range, is_folded) in excerpts.into_iter() {
|
||||
write!(f, "[EXCERPT]\n")?;
|
||||
if *is_folded {
|
||||
write!(f, "[FOLDED]\n")?;
|
||||
}
|
||||
|
||||
let mut text = multibuffer_snapshot
|
||||
.text_for_range(Anchor::range_in_buffer(
|
||||
*excerpt_id,
|
||||
snapshot.remote_id(),
|
||||
range.context.clone(),
|
||||
))
|
||||
.collect::<String>();
|
||||
|
||||
let selections = selections
|
||||
.iter()
|
||||
.filter(|&s| s.head().excerpt_id == *excerpt_id)
|
||||
.map(|s| {
|
||||
let head = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot)
|
||||
- text::ToOffset::to_offset(&range.context.start, &snapshot);
|
||||
let tail = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot)
|
||||
- text::ToOffset::to_offset(&range.context.start, &snapshot);
|
||||
tail..head
|
||||
})
|
||||
.rev()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for selection in selections {
|
||||
if selection.is_empty() {
|
||||
text.insert(selection.start, 'ˇ');
|
||||
continue;
|
||||
}
|
||||
text.insert(selection.end, '»');
|
||||
text.insert(selection.start, '«');
|
||||
}
|
||||
|
||||
write!(f, "{text}")?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn assert_state_with_diff(
|
||||
editor: &Entity<Editor>,
|
||||
|
||||
@@ -1,4 +1,4 @@
use anyhow::{Context as _, Result, anyhow, bail};
use anyhow::{Context as _, Result, bail};
use collections::{BTreeMap, HashMap};
use fs::Fs;
use language::LanguageName;
@@ -226,9 +226,8 @@ impl ExtensionManifest {
.load(&extension_manifest_path)
.await
.with_context(|| format!("failed to load {extension_name} extension.toml"))?;
toml::from_str(&manifest_content).map_err(|err| {
anyhow!("Invalid extension.toml for extension {extension_name}:\n{err}")
})
toml::from_str(&manifest_content)
.with_context(|| format!("invalid extension.toml for extension {extension_name}"))
}
}
}

@@ -29,7 +29,7 @@ use ui::{
|
||||
};
|
||||
use vim_mode_setting::VimModeSetting;
|
||||
use workspace::{
|
||||
Workspace,
|
||||
Workspace, WorkspaceId,
|
||||
item::{Item, ItemEvent},
|
||||
};
|
||||
use zed_actions::ExtensionCategoryFilter;
|
||||
@@ -1551,6 +1551,15 @@ impl Item for ExtensionsPage {
|
||||
false
|
||||
}
|
||||
|
||||
fn clone_on_split(
|
||||
&self,
|
||||
_workspace_id: Option<WorkspaceId>,
|
||||
_window: &mut Window,
|
||||
_: &mut Context<Self>,
|
||||
) -> Option<Entity<Self>> {
|
||||
None
|
||||
}
|
||||
|
||||
fn to_item_events(event: &Self::Event, mut f: impl FnMut(workspace::item::ItemEvent)) {
|
||||
f(*event)
|
||||
}
|
||||
|
||||
@@ -17,3 +17,9 @@ pub struct PanicFeatureFlag;
impl FeatureFlag for PanicFeatureFlag {
const NAME: &'static str = "panic";
}

pub struct CodexAcpFeatureFlag;

impl FeatureFlag for CodexAcpFeatureFlag {
const NAME: &'static str = "codex-acp";
}

@@ -16,12 +16,14 @@ test-support = []

[dependencies]
gpui.workspace = true
menu.workspace = true
system_specs.workspace = true
ui.workspace = true
urlencoding.workspace = true
util.workspace = true
workspace-hack.workspace = true
workspace.workspace = true
zed_actions.workspace = true
workspace-hack.workspace = true

[dev-dependencies]
editor = { workspace = true, features = ["test-support"] }

@@ -2,13 +2,19 @@ use gpui::{App, ClipboardItem, PromptLevel, actions};
use system_specs::{CopySystemSpecsIntoClipboard, SystemSpecs};
use util::ResultExt;
use workspace::Workspace;
use zed_actions::feedback::{EmailZed, FileBugReport, RequestFeature};
use zed_actions::feedback::FileBugReport;

pub mod feedback_modal;

actions!(
zed,
[
/// Opens email client to send feedback to Zed support.
EmailZed,
/// Opens the Zed repository on GitHub.
OpenZedRepo,
/// Opens the feature request form.
RequestFeature,
]
);

@@ -42,7 +48,11 @@ fn email_body(specs: &SystemSpecs) -> String {
}

pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _, _| {
cx.observe_new(|workspace: &mut Workspace, window, cx| {
let Some(window) = window else {
return;
};
feedback_modal::FeedbackModal::register(workspace, window, cx);
workspace
.register_action(|_, _: &CopySystemSpecsIntoClipboard, window, cx| {
let specs = SystemSpecs::new(window, cx);

113
crates/feedback/src/feedback_modal.rs
Normal file
@@ -0,0 +1,113 @@
|
||||
use gpui::{App, Context, DismissEvent, EventEmitter, FocusHandle, Focusable, Render, Window};
|
||||
use ui::{IconPosition, prelude::*};
|
||||
use workspace::{ModalView, Workspace};
|
||||
use zed_actions::feedback::GiveFeedback;
|
||||
|
||||
use crate::{EmailZed, FileBugReport, OpenZedRepo, RequestFeature};
|
||||
|
||||
pub struct FeedbackModal {
|
||||
focus_handle: FocusHandle,
|
||||
}
|
||||
|
||||
impl Focusable for FeedbackModal {
|
||||
fn focus_handle(&self, _: &App) -> FocusHandle {
|
||||
self.focus_handle.clone()
|
||||
}
|
||||
}
|
||||
impl EventEmitter<DismissEvent> for FeedbackModal {}
|
||||
|
||||
impl ModalView for FeedbackModal {}
|
||||
|
||||
impl FeedbackModal {
|
||||
pub fn register(workspace: &mut Workspace, _: &mut Window, cx: &mut Context<Workspace>) {
|
||||
let _handle = cx.entity().downgrade();
|
||||
workspace.register_action(move |workspace, _: &GiveFeedback, window, cx| {
|
||||
workspace.toggle_modal(window, cx, move |_, cx| FeedbackModal::new(cx));
|
||||
});
|
||||
}
|
||||
|
||||
pub fn new(cx: &mut Context<Self>) -> Self {
|
||||
Self {
|
||||
focus_handle: cx.focus_handle(),
|
||||
}
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context<Self>) {
|
||||
cx.emit(DismissEvent)
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for FeedbackModal {
|
||||
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let open_zed_repo =
|
||||
cx.listener(|_, _, window, cx| window.dispatch_action(Box::new(OpenZedRepo), cx));
|
||||
|
||||
v_flex()
|
||||
.key_context("GiveFeedback")
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.elevation_3(cx)
|
||||
.w_96()
|
||||
.h_auto()
|
||||
.p_4()
|
||||
.gap_2()
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(Headline::new("Give Feedback"))
|
||||
.child(
|
||||
IconButton::new("close-btn", IconName::Close)
|
||||
.icon_color(Color::Muted)
|
||||
.on_click(cx.listener(move |_, _, window, cx| {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
this.update(cx, |_, cx| cx.emit(DismissEvent)).ok();
|
||||
})
|
||||
.detach();
|
||||
})),
|
||||
),
|
||||
)
|
||||
.child(Label::new("Thanks for using Zed! To share your experience with us, reach for the channel that's the most appropriate:"))
|
||||
.child(
|
||||
Button::new("file-a-bug-report", "File a Bug Report")
|
||||
.full_width()
|
||||
.icon(IconName::Debug)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(cx.listener(|_, _, window, cx| {
|
||||
window.dispatch_action(Box::new(FileBugReport), cx);
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
Button::new("request-a-feature", "Request a Feature")
|
||||
.full_width()
|
||||
.icon(IconName::Sparkle)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(cx.listener(|_, _, window, cx| {
|
||||
window.dispatch_action(Box::new(RequestFeature), cx);
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
Button::new("send-us_an-email", "Send an Email")
|
||||
.full_width()
|
||||
.icon(IconName::Envelope)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(cx.listener(|_, _, window, cx| {
|
||||
window.dispatch_action(Box::new(EmailZed), cx);
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
Button::new("zed_repository", "GitHub Repository")
|
||||
.full_width()
|
||||
.icon(IconName::Github)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(open_zed_repo),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1172,25 +1172,18 @@ impl FileFinderDelegate {
)
}

/// Attempts to resolve an absolute file path and update the search matches if found.
///
/// If the query path resolves to an absolute file that exists in the project,
/// this method will find the corresponding worktree and relative path, create a
/// match for it, and update the picker's search results.
///
/// Returns `true` if the absolute path exists, otherwise returns `false`.
fn lookup_absolute_path(
&self,
query: FileSearchQuery,
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Task<bool> {
) -> Task<()> {
cx.spawn_in(window, async move |picker, cx| {
let Some(project) = picker
.read_with(cx, |picker, _| picker.delegate.project.clone())
.log_err()
else {
return false;
return;
};

let query_path = Path::new(query.path_query());

@@ -1223,7 +1216,7 @@ impl FileFinderDelegate {
|
||||
})
|
||||
.log_err();
|
||||
if update_result.is_none() {
|
||||
return abs_file_exists;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1236,7 +1229,6 @@ impl FileFinderDelegate {
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.log_err();
|
||||
abs_file_exists
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1385,14 +1377,13 @@ impl PickerDelegate for FileFinderDelegate {
|
||||
} else {
|
||||
let path_position = PathWithPosition::parse_str(raw_query);
|
||||
let raw_query = raw_query.trim().trim_end_matches(':').to_owned();
|
||||
let path = path_position.path.clone();
|
||||
let path_str = path_position.path.to_str();
|
||||
let path_trimmed = path_str.unwrap_or(&raw_query).trim_end_matches(':');
|
||||
let path = path_position.path.to_str();
|
||||
let path_trimmed = path.unwrap_or(&raw_query).trim_end_matches(':');
|
||||
let file_query_end = if path_trimmed == raw_query {
|
||||
None
|
||||
} else {
|
||||
// Safe to unwrap as we won't get here when the unwrap in if fails
|
||||
Some(path_str.unwrap().len())
|
||||
Some(path.unwrap().len())
|
||||
};
|
||||
|
||||
let query = FileSearchQuery {
|
||||
@@ -1401,29 +1392,11 @@ impl PickerDelegate for FileFinderDelegate {
|
||||
path_position,
|
||||
};
|
||||
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let _ = maybe!(async move {
|
||||
let is_absolute_path = path.is_absolute();
|
||||
let did_resolve_abs_path = is_absolute_path
|
||||
&& this
|
||||
.update_in(cx, |this, window, cx| {
|
||||
this.delegate
|
||||
.lookup_absolute_path(query.clone(), window, cx)
|
||||
})?
|
||||
.await;
|
||||
|
||||
// Only check for relative paths if no absolute paths were
|
||||
// found.
|
||||
if !did_resolve_abs_path {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.delegate.spawn_search(query, window, cx)
|
||||
})?
|
||||
.await;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.await;
|
||||
})
|
||||
if Path::new(query.path_query()).is_absolute() {
|
||||
self.lookup_absolute_path(query, window, cx)
|
||||
} else {
|
||||
self.spawn_search(query, window, cx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3069,49 +3069,3 @@ async fn test_filename_precedence(cx: &mut TestAppContext) {
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_paths_with_starting_slash(cx: &mut TestAppContext) {
|
||||
let app_state = init_test(cx);
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
path!("/root"),
|
||||
json!({
|
||||
"a": {
|
||||
"file1.txt": "",
|
||||
"b": {
|
||||
"file2.txt": "",
|
||||
},
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
|
||||
let (picker, workspace, cx) = build_find_picker(project, cx);
|
||||
|
||||
let matching_abs_path = "/file1.txt".to_string();
|
||||
picker
|
||||
.update_in(cx, |picker, window, cx| {
|
||||
picker
|
||||
.delegate
|
||||
.update_matches(matching_abs_path, window, cx)
|
||||
})
|
||||
.await;
|
||||
picker.update(cx, |picker, _| {
|
||||
assert_eq!(
|
||||
collect_search_matches(picker).search_paths_only(),
|
||||
vec![rel_path("a/file1.txt").into()],
|
||||
"Relative path starting with slash should match"
|
||||
)
|
||||
});
|
||||
cx.dispatch_action(SelectNext);
|
||||
cx.dispatch_action(Confirm);
|
||||
cx.read(|cx| {
|
||||
let active_editor = workspace.read(cx).active_item_as::<Editor>(cx).unwrap();
|
||||
assert_eq!(active_editor.read(cx).title(cx), "file1.txt");
|
||||
});
|
||||
}
|
||||
|
||||
@@ -755,7 +755,7 @@ impl PickerDelegate for OpenPathDelegate {
|
||||
.with_default_highlights(
|
||||
&window.text_style(),
|
||||
vec![(
|
||||
delta..delta + label_len,
|
||||
delta..label_len,
|
||||
HighlightStyle::color(Color::Conflict.color(cx)),
|
||||
)],
|
||||
)
|
||||
@@ -765,7 +765,7 @@ impl PickerDelegate for OpenPathDelegate {
|
||||
.with_default_highlights(
|
||||
&window.text_style(),
|
||||
vec![(
|
||||
delta..delta + label_len,
|
||||
delta..label_len,
|
||||
HighlightStyle::color(Color::Created.color(cx)),
|
||||
)],
|
||||
)
|
||||
|
||||
@@ -23,7 +23,6 @@ derive_more.workspace = true
|
||||
git2.workspace = true
|
||||
gpui.workspace = true
|
||||
http_client.workspace = true
|
||||
itertools.workspace = true
|
||||
log.workspace = true
|
||||
parking_lot.workspace = true
|
||||
regex.workspace = true
|
||||
@@ -37,7 +36,6 @@ text.workspace = true
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
url.workspace = true
|
||||
urlencoding.workspace = true
|
||||
util.workspace = true
|
||||
uuid.workspace = true
|
||||
futures.workspace = true
|
||||
|
||||
@@ -94,8 +94,6 @@ actions!(
|
||||
OpenModifiedFiles,
|
||||
/// Clones a repository.
|
||||
Clone,
|
||||
/// Adds a file to .gitignore.
|
||||
AddToGitignore,
|
||||
]
|
||||
);
|
||||
|
||||
|
||||
@@ -5,12 +5,9 @@ use async_trait::async_trait;
use derive_more::{Deref, DerefMut};
use gpui::{App, Global, SharedString};
use http_client::HttpClient;
use itertools::Itertools;
use parking_lot::RwLock;
use url::Url;

use crate::repository::RepoPath;

#[derive(Debug, PartialEq, Eq, Clone)]
pub struct PullRequest {
pub number: u32,
@@ -58,21 +55,10 @@ pub struct BuildCommitPermalinkParams<'a> {

pub struct BuildPermalinkParams<'a> {
pub sha: &'a str,
/// URL-escaped path using unescaped `/` as the directory separator.
pub path: String,
pub path: &'a str,
pub selection: Option<Range<u32>>,
}

impl<'a> BuildPermalinkParams<'a> {
pub fn new(sha: &'a str, path: &RepoPath, selection: Option<Range<u32>>) -> Self {
Self {
sha,
path: path.components().map(urlencoding::encode).join("/"),
selection,
}
}
}

/// A Git hosting provider.
#[async_trait]
pub trait GitHostingProvider {

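The new `BuildPermalinkParams::new` constructor above percent-encodes each path component while keeping `/` as a readable separator. A minimal standalone sketch of that encoding over plain strings (the `escape_repo_path` helper is hypothetical, not Zed's `RepoPath` API; it assumes the `urlencoding` and `itertools` crates, which the diff's Cargo.toml entries already pull in):

use itertools::Itertools;

// Escape each component of a slash-separated repository path, leaving the
// separators themselves unescaped.
fn escape_repo_path(path: &str) -> String {
    path.split('/')
        .map(|component| urlencoding::encode(component))
        .join("/")
}

fn main() {
    // Matches the expected permalink for "app/blog/[slug]/page.tsx" in the
    // regression test further down.
    assert_eq!(
        escape_repo_path("app/blog/[slug]/page.tsx"),
        "app/blog/%5Bslug%5D/page.tsx"
    );
}
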
@@ -30,4 +30,3 @@ workspace-hack.workspace = true
|
||||
indoc.workspace = true
|
||||
serde_json.workspace = true
|
||||
pretty_assertions.workspace = true
|
||||
git = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -126,7 +126,6 @@ impl GitHostingProvider for Bitbucket {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
@@ -183,7 +182,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new("f00b4r", &repo_path("main.rs"), None),
|
||||
BuildPermalinkParams {
|
||||
sha: "f00b4r",
|
||||
path: "main.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs";
|
||||
@@ -197,7 +200,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new("f00b4r", &repo_path("main.rs"), Some(6..6)),
|
||||
BuildPermalinkParams {
|
||||
sha: "f00b4r",
|
||||
path: "main.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs#lines-7";
|
||||
@@ -211,7 +218,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new("f00b4r", &repo_path("main.rs"), Some(23..47)),
|
||||
BuildPermalinkParams {
|
||||
sha: "f00b4r",
|
||||
path: "main.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url =
|
||||
|
||||
@@ -191,7 +191,6 @@ impl GitHostingProvider for Chromium {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use indoc::indoc;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
@@ -219,11 +218,11 @@ mod tests {
|
||||
owner: Arc::from(""),
|
||||
repo: "chromium/src".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"fea5080b182fc92e3be0c01c5dece602fe70b588",
|
||||
&repo_path("ui/base/cursor/cursor.h"),
|
||||
None,
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "fea5080b182fc92e3be0c01c5dece602fe70b588",
|
||||
path: "ui/base/cursor/cursor.h",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://chromium.googlesource.com/chromium/src/+/fea5080b182fc92e3be0c01c5dece602fe70b588/ui/base/cursor/cursor.h";
|
||||
@@ -237,11 +236,11 @@ mod tests {
|
||||
owner: Arc::from(""),
|
||||
repo: "chromium/src".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"fea5080b182fc92e3be0c01c5dece602fe70b588",
|
||||
&repo_path("ui/base/cursor/cursor.h"),
|
||||
Some(18..18),
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "fea5080b182fc92e3be0c01c5dece602fe70b588",
|
||||
path: "ui/base/cursor/cursor.h",
|
||||
selection: Some(18..18),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://chromium.googlesource.com/chromium/src/+/fea5080b182fc92e3be0c01c5dece602fe70b588/ui/base/cursor/cursor.h#19";
|
||||
@@ -255,11 +254,11 @@ mod tests {
|
||||
owner: Arc::from(""),
|
||||
repo: "chromium/src".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"fea5080b182fc92e3be0c01c5dece602fe70b588",
|
||||
&repo_path("ui/base/cursor/cursor.h"),
|
||||
Some(18..30),
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "fea5080b182fc92e3be0c01c5dece602fe70b588",
|
||||
path: "ui/base/cursor/cursor.h",
|
||||
selection: Some(18..30),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://chromium.googlesource.com/chromium/src/+/fea5080b182fc92e3be0c01c5dece602fe70b588/ui/base/cursor/cursor.h#19";
|
||||
|
||||
@@ -204,7 +204,6 @@ impl GitHostingProvider for Codeberg {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
@@ -246,11 +245,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs";
|
||||
@@ -264,11 +263,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(6..6),
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L7";
|
||||
@@ -282,11 +281,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(23..47),
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L24-L48";
|
||||
|
||||
@@ -84,7 +84,6 @@ impl GitHostingProvider for Gitee {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
@@ -126,11 +125,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs";
|
||||
@@ -144,11 +143,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(6..6),
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L7";
|
||||
@@ -162,11 +161,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(23..47),
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L24-48";
|
||||
|
||||
@@ -259,7 +259,6 @@ impl GitHostingProvider for Github {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use indoc::indoc;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
@@ -401,11 +400,11 @@ mod tests {
|
||||
};
|
||||
let permalink = Github::public_instance().build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs";
|
||||
@@ -419,11 +418,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
|
||||
&repo_path("crates/zed/src/main.rs"),
|
||||
None,
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/zed/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs";
|
||||
@@ -437,11 +436,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(6..6),
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L7";
|
||||
@@ -455,11 +454,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(23..47),
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L24-L48";
|
||||
@@ -507,23 +506,4 @@ mod tests {
};
assert_eq!(github.extract_pull_request(&remote, message), None);
}

/// Regression test for issue #39875
#[test]
fn test_git_permalink_url_escaping() {
let permalink = Github::public_instance().build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "nonexistent".into(),
},
BuildPermalinkParams::new(
"3ef1539900037dd3601be7149b2b39ed6d0ce3db",
&repo_path("app/blog/[slug]/page.tsx"),
Some(7..7),
),
);

let expected_url = "https://github.com/zed-industries/nonexistent/blob/3ef1539900037dd3601be7149b2b39ed6d0ce3db/app/blog/%5Bslug%5D/page.tsx#L8";
assert_eq!(permalink.to_string(), expected_url.to_string())
}
}

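The expected URLs in these GitHub tests show how a zero-based selection range becomes a one-based `#L…` fragment (6..6 gives `#L7`, 7..7 gives `#L8`, 23..47 gives `#L24-L48`). A hedged sketch of that mapping for the GitHub style only; the `github_line_fragment` helper is hypothetical and not the provider trait's actual method:

use std::ops::Range;

// Build the line fragment seen in the asserted permalinks above.
fn github_line_fragment(selection: Option<Range<u32>>) -> String {
    match selection {
        None => String::new(),
        Some(range) if range.start == range.end => format!("#L{}", range.start + 1),
        Some(range) => format!("#L{}-L{}", range.start + 1, range.end + 1),
    }
}

fn main() {
    assert_eq!(github_line_fragment(Some(6..6)), "#L7");
    assert_eq!(github_line_fragment(Some(23..47)), "#L24-L48");
    assert_eq!(github_line_fragment(None), "");
}
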
@@ -126,7 +126,6 @@ impl GitHostingProvider for Gitlab {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
@@ -210,11 +209,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs";
|
||||
@@ -228,11 +227,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(6..6),
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L7";
|
||||
@@ -246,11 +245,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(23..47),
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L24-48";
|
||||
@@ -267,11 +266,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitlab.some-enterprise.com/zed-industries/zed/-/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs";
|
||||
@@ -288,11 +287,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
|
||||
&repo_path("crates/zed/src/main.rs"),
|
||||
None,
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://gitlab-instance.big-co.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs";
|
||||
|
||||
@@ -89,7 +89,6 @@ impl GitHostingProvider for Sourcehut {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
@@ -146,11 +145,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs";
|
||||
@@ -164,11 +163,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed.git".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://git.sr.ht/~zed-industries/zed.git/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs";
|
||||
@@ -182,11 +181,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(6..6),
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L7";
|
||||
@@ -200,11 +199,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(23..47),
|
||||
),
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
);
|
||||
|
||||
let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L24-48";
|
||||
|
||||
@@ -43,8 +43,8 @@ struct CommitMetadataFile {
|
||||
worktree_id: WorktreeId,
|
||||
}
|
||||
|
||||
const COMMIT_METADATA_SORT_PREFIX: u64 = 0;
|
||||
const FILE_NAMESPACE_SORT_PREFIX: u64 = 1;
|
||||
const COMMIT_METADATA_NAMESPACE: u64 = 0;
|
||||
const FILE_NAMESPACE: u64 = 1;
|
||||
|
||||
impl CommitView {
|
||||
pub fn open(
|
||||
@@ -145,7 +145,7 @@ impl CommitView {
|
||||
});
|
||||
multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::with_sort_prefix(COMMIT_METADATA_SORT_PREFIX, file.title.clone()),
|
||||
PathKey::namespaced(COMMIT_METADATA_NAMESPACE, file.title.clone()),
|
||||
buffer.clone(),
|
||||
vec![Point::zero()..buffer.read(cx).max_point()],
|
||||
0,
|
||||
@@ -193,7 +193,7 @@ impl CommitView {
|
||||
.collect::<Vec<_>>();
|
||||
let path = snapshot.file().unwrap().path().clone();
|
||||
let _is_newly_added = multibuffer.set_excerpts_for_path(
|
||||
PathKey::with_sort_prefix(FILE_NAMESPACE_SORT_PREFIX, path),
|
||||
PathKey::namespaced(FILE_NAMESPACE, path),
|
||||
buffer,
|
||||
diff_hunk_ranges,
|
||||
multibuffer_context_lines(cx),
|
||||
|
||||
@@ -386,7 +386,6 @@ impl GitPanel {
|
||||
cx.observe_global_in::<SettingsStore>(window, move |this, window, cx| {
|
||||
let is_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
|
||||
if is_sort_by_path != was_sort_by_path {
|
||||
this.entries.clear();
|
||||
this.update_visible_entries(window, cx);
|
||||
}
|
||||
was_sort_by_path = is_sort_by_path
|
||||
@@ -870,77 +869,6 @@ impl GitPanel {
|
||||
});
|
||||
}
|
||||
|
||||
fn add_to_gitignore(
|
||||
&mut self,
|
||||
_: &git::AddToGitignore,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
maybe!({
|
||||
let list_entry = self.entries.get(self.selected_entry?)?.clone();
|
||||
let entry = list_entry.status_entry()?.to_owned();
|
||||
|
||||
if !entry.status.is_created() {
|
||||
return Some(());
|
||||
}
|
||||
|
||||
let project = self.project.downgrade();
|
||||
let repo_path = entry.repo_path;
|
||||
let active_repository = self.active_repository.as_ref()?.downgrade();
|
||||
|
||||
cx.spawn(async move |_, cx| {
|
||||
let file_path_str = repo_path.0.display(PathStyle::Posix);
|
||||
|
||||
let repo_root = active_repository.read_with(cx, |repository, _| {
|
||||
repository.snapshot().work_directory_abs_path
|
||||
})?;
|
||||
|
||||
let gitignore_abs_path = repo_root.join(".gitignore");
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(gitignore_abs_path, cx)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let mut should_save = false;
buffer.update(cx, |buffer, cx| {
let existing_content = buffer.text();

if existing_content
.lines()
.any(|line| line.trim() == file_path_str)
{
return;
}

let insert_position = existing_content.len();
let new_entry = if existing_content.is_empty() {
format!("{}\n", file_path_str)
} else if existing_content.ends_with('\n') {
format!("{}\n", file_path_str)
} else {
format!("\n{}\n", file_path_str)
};

buffer.edit([(insert_position..insert_position, new_entry)], None, cx);
should_save = true;
})?;

if should_save {
project
.update(cx, |project, cx| project.save_buffer(buffer, cx))?
.await?;
}

anyhow::Ok(())
})
.detach_and_log_err(cx);

Some(())
});
}

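The `add_to_gitignore` handler in the hunk above appends the file's repo-relative path to `.gitignore`, skipping entries that are already listed and preserving a trailing newline. A minimal standalone sketch of the same idea with plain `std::fs`, outside Zed's buffer and project plumbing:

use std::fs;
use std::io;
use std::path::Path;

// Append `entry` to the repository's .gitignore unless it is already there.
fn add_to_gitignore(repo_root: &Path, entry: &str) -> io::Result<()> {
    let gitignore = repo_root.join(".gitignore");
    let existing = fs::read_to_string(&gitignore).unwrap_or_default();

    // Nothing to do if the exact entry is already ignored.
    if existing.lines().any(|line| line.trim() == entry) {
        return Ok(());
    }

    let mut updated = existing;
    if !updated.is_empty() && !updated.ends_with('\n') {
        updated.push('\n');
    }
    updated.push_str(entry);
    updated.push('\n');
    fs::write(&gitignore, updated)
}

fn main() -> io::Result<()> {
    // Example: ignore "new_file.txt" in the current checkout.
    add_to_gitignore(Path::new("."), "new_file.txt")
}
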
|
||||
fn revert_entry(
|
||||
&mut self,
|
||||
entry: &GitStatusEntry,
|
||||
@@ -3888,17 +3816,10 @@ impl GitPanel {
|
||||
"Restore File"
|
||||
};
|
||||
let context_menu = ContextMenu::build(window, cx, |context_menu, _, _| {
|
||||
let mut context_menu = context_menu
|
||||
context_menu
|
||||
.context(self.focus_handle.clone())
|
||||
.action(stage_title, ToggleStaged.boxed_clone())
|
||||
.action(restore_title, git::RestoreFile::default().boxed_clone());
|
||||
|
||||
if entry.status.is_created() {
|
||||
context_menu =
|
||||
context_menu.action("Add to .gitignore", git::AddToGitignore.boxed_clone());
|
||||
}
|
||||
|
||||
context_menu
|
||||
.action(restore_title, git::RestoreFile::default().boxed_clone())
|
||||
.separator()
|
||||
.action("Open Diff", Confirm.boxed_clone())
|
||||
.action("Open File", SecondaryConfirm.boxed_clone())
|
||||
@@ -4321,7 +4242,6 @@ impl Render for GitPanel {
|
||||
.on_action(cx.listener(Self::unstage_selected))
|
||||
.on_action(cx.listener(Self::restore_tracked_files))
|
||||
.on_action(cx.listener(Self::revert_selected))
|
||||
.on_action(cx.listener(Self::add_to_gitignore))
|
||||
.on_action(cx.listener(Self::clean_all))
|
||||
.on_action(cx.listener(Self::generate_commit_message_action))
|
||||
.on_action(cx.listener(Self::stash_all))
|
||||
@@ -4967,13 +4887,12 @@ mod tests {
|
||||
repository::repo_path,
|
||||
status::{StatusCode, UnmergedStatus, UnmergedStatusCode},
|
||||
};
|
||||
use gpui::{TestAppContext, UpdateGlobal, VisualTestContext};
|
||||
use gpui::{TestAppContext, VisualTestContext};
|
||||
use project::{FakeFs, WorktreeSettings};
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use theme::LoadThemes;
|
||||
use util::path;
|
||||
use util::rel_path::rel_path;
|
||||
|
||||
use super::*;
|
||||
|
||||
@@ -5291,242 +5210,6 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_bulk_staging_with_sort_by_paths(cx: &mut TestAppContext) {
|
||||
use GitListEntry::*;
|
||||
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"project": {
|
||||
".git": {},
|
||||
"src": {
|
||||
"main.rs": "fn main() {}",
|
||||
"lib.rs": "pub fn hello() {}",
|
||||
"utils.rs": "pub fn util() {}"
|
||||
},
|
||||
"tests": {
|
||||
"test.rs": "fn test() {}"
|
||||
},
|
||||
"new_file.txt": "new content",
|
||||
"another_new.rs": "// new file",
|
||||
"conflict.txt": "conflicted content"
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
fs.set_status_for_repo(
|
||||
Path::new(path!("/root/project/.git")),
|
||||
&[
|
||||
("src/main.rs", StatusCode::Modified.worktree()),
|
||||
("src/lib.rs", StatusCode::Modified.worktree()),
|
||||
("tests/test.rs", StatusCode::Modified.worktree()),
|
||||
("new_file.txt", FileStatus::Untracked),
|
||||
("another_new.rs", FileStatus::Untracked),
|
||||
("src/utils.rs", FileStatus::Untracked),
|
||||
(
|
||||
"conflict.txt",
|
||||
UnmergedStatus {
|
||||
first_head: UnmergedStatusCode::Updated,
|
||||
second_head: UnmergedStatusCode::Updated,
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
],
|
||||
);
|
||||
|
||||
let project = Project::test(fs.clone(), [Path::new(path!("/root/project"))], cx).await;
|
||||
let workspace =
|
||||
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let cx = &mut VisualTestContext::from_window(*workspace, cx);
|
||||
|
||||
cx.read(|cx| {
|
||||
project
|
||||
.read(cx)
|
||||
.worktrees(cx)
|
||||
.next()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.as_local()
|
||||
.unwrap()
|
||||
.scan_complete()
|
||||
})
|
||||
.await;
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let panel = workspace.update(cx, GitPanel::new).unwrap();
|
||||
|
||||
let handle = cx.update_window_entity(&panel, |panel, _, _| {
|
||||
std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(()))
|
||||
});
|
||||
cx.executor().advance_clock(2 * UPDATE_DEBOUNCE);
|
||||
handle.await;
|
||||
|
||||
let entries = panel.read_with(cx, |panel, _| panel.entries.clone());
|
||||
#[rustfmt::skip]
|
||||
pretty_assertions::assert_matches!(
|
||||
entries.as_slice(),
|
||||
&[
|
||||
Header(GitHeaderEntry { header: Section::Conflict }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
Header(GitHeaderEntry { header: Section::Tracked }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
Header(GitHeaderEntry { header: Section::New }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
],
|
||||
);
|
||||
|
||||
assert_entry_paths(
|
||||
&entries,
|
||||
&[
|
||||
None,
|
||||
Some("conflict.txt"),
|
||||
None,
|
||||
Some("src/lib.rs"),
|
||||
Some("src/main.rs"),
|
||||
Some("tests/test.rs"),
|
||||
None,
|
||||
Some("another_new.rs"),
|
||||
Some("new_file.txt"),
|
||||
Some("src/utils.rs"),
|
||||
],
|
||||
);
|
||||
|
||||
let second_status_entry = entries[3].clone();
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.toggle_staged_for_entry(&second_status_entry, window, cx);
|
||||
});
|
||||
|
||||
cx.update(|_window, cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings(cx, |settings| {
|
||||
settings.git_panel.get_or_insert_default().sort_by_path = Some(true);
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.selected_entry = Some(7);
|
||||
panel.stage_range(&git::StageRange, window, cx);
|
||||
});
|
||||
|
||||
cx.read(|cx| {
|
||||
project
|
||||
.read(cx)
|
||||
.worktrees(cx)
|
||||
.next()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.as_local()
|
||||
.unwrap()
|
||||
.scan_complete()
|
||||
})
|
||||
.await;
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let handle = cx.update_window_entity(&panel, |panel, _, _| {
|
||||
std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(()))
|
||||
});
|
||||
cx.executor().advance_clock(2 * UPDATE_DEBOUNCE);
|
||||
handle.await;
|
||||
|
||||
let entries = panel.read_with(cx, |panel, _| panel.entries.clone());
|
||||
#[rustfmt::skip]
|
||||
pretty_assertions::assert_matches!(
|
||||
entries.as_slice(),
|
||||
&[
|
||||
Status(GitStatusEntry { status: FileStatus::Untracked, staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Unmerged(..), staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Untracked, staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Tracked(..), staging: StageStatus::Staged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Tracked(..), staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Untracked, staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Tracked(..), staging: StageStatus::Unstaged, .. }),
|
||||
],
|
||||
);
|
||||
|
||||
assert_entry_paths(
|
||||
&entries,
|
||||
&[
|
||||
Some("another_new.rs"),
|
||||
Some("conflict.txt"),
|
||||
Some("new_file.txt"),
|
||||
Some("src/lib.rs"),
|
||||
Some("src/main.rs"),
|
||||
Some("src/utils.rs"),
|
||||
Some("tests/test.rs"),
|
||||
],
|
||||
);
|
||||
|
||||
let third_status_entry = entries[4].clone();
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.toggle_staged_for_entry(&third_status_entry, window, cx);
|
||||
});
|
||||
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.selected_entry = Some(9);
|
||||
panel.stage_range(&git::StageRange, window, cx);
|
||||
});
|
||||
|
||||
cx.read(|cx| {
|
||||
project
|
||||
.read(cx)
|
||||
.worktrees(cx)
|
||||
.next()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.as_local()
|
||||
.unwrap()
|
||||
.scan_complete()
|
||||
})
|
||||
.await;
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let handle = cx.update_window_entity(&panel, |panel, _, _| {
|
||||
std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(()))
|
||||
});
|
||||
cx.executor().advance_clock(2 * UPDATE_DEBOUNCE);
|
||||
handle.await;
|
||||
|
||||
let entries = panel.read_with(cx, |panel, _| panel.entries.clone());
|
||||
#[rustfmt::skip]
|
||||
pretty_assertions::assert_matches!(
|
||||
entries.as_slice(),
|
||||
&[
|
||||
Status(GitStatusEntry { status: FileStatus::Untracked, staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Unmerged(..), staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Untracked, staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Tracked(..), staging: StageStatus::Staged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Tracked(..), staging: StageStatus::Staged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Untracked, staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Tracked(..), staging: StageStatus::Unstaged, .. }),
|
||||
],
|
||||
);
|
||||
|
||||
assert_entry_paths(
|
||||
&entries,
|
||||
&[
|
||||
Some("another_new.rs"),
|
||||
Some("conflict.txt"),
|
||||
Some("new_file.txt"),
|
||||
Some("src/lib.rs"),
|
||||
Some("src/main.rs"),
|
||||
Some("src/utils.rs"),
|
||||
Some("tests/test.rs"),
|
||||
],
|
||||
);
|
||||
}

#[gpui::test]
async fn test_amend_commit_message_handling(cx: &mut TestAppContext) {
init_test(cx);
@@ -5595,81 +5278,4 @@ mod tests {
assert_eq!(current_message, "");
});
}

#[gpui::test]
async fn test_open_diff(cx: &mut TestAppContext) {
init_test(cx);

let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
path!("/project"),
json!({
".git": {},
"tracked": "tracked\n",
"untracked": "\n",
}),
)
.await;

fs.set_head_and_index_for_repo(
path!("/project/.git").as_ref(),
&[("tracked", "old tracked\n".into())],
);

let project = Project::test(fs.clone(), [Path::new(path!("/project"))], cx).await;
let workspace =
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let panel = workspace.update(cx, GitPanel::new).unwrap();

// Enable the `sort_by_path` setting and wait for entries to be updated,
// as there should no longer be separators between Tracked and Untracked
// files.
cx.update(|_window, cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings(cx, |settings| {
settings.git_panel.get_or_insert_default().sort_by_path = Some(true);
})
});
});

cx.update_window_entity(&panel, |panel, _, _| {
std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(()))
})
.await;

// Confirm that `Open Diff` still works for the untracked file, updating
// the Project Diff's active path.
panel.update_in(cx, |panel, window, cx| {
panel.selected_entry = Some(1);
panel.open_diff(&Confirm, window, cx);
});
cx.run_until_parked();

let _ = workspace.update(cx, |workspace, _window, cx| {
let active_path = workspace
.item_of_type::<ProjectDiff>(cx)
.expect("ProjectDiff should exist")
.read(cx)
.active_path(cx)
.expect("active_path should exist");

assert_eq!(active_path.path, rel_path("untracked").into_arc());
});
}

fn assert_entry_paths(entries: &[GitListEntry], expected_paths: &[Option<&str>]) {
assert_eq!(entries.len(), expected_paths.len());
for (entry, expected_path) in entries.iter().zip(expected_paths) {
assert_eq!(
entry.status_entry().map(|status| status
.repo_path
.0
.as_std_path()
.to_string_lossy()
.to_string()),
expected_path.map(|s| s.to_string())
);
}
}
}
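
For context, the comparison inside `assert_entry_paths` boils down to stringifying each entry's repo path and checking it against an `Option<&str>`. Below is a hedged, std-only sketch of that idea; `Entry` is a hypothetical stand-in for `GitListEntry` (where header or separator rows carry no path), not the real type:

```rust
// Std-only sketch of the path-comparison idea behind `assert_entry_paths`.
use std::path::PathBuf;

// Hypothetical stand-in: status rows carry a repo path, header rows do not.
struct Entry {
    repo_path: Option<PathBuf>,
}

fn assert_entry_paths(entries: &[Entry], expected_paths: &[Option<&str>]) {
    assert_eq!(entries.len(), expected_paths.len());
    for (entry, expected) in entries.iter().zip(expected_paths) {
        // Stringify the path (if any) and compare against the expectation.
        let actual = entry
            .repo_path
            .as_ref()
            .map(|p| p.to_string_lossy().to_string());
        assert_eq!(actual, expected.map(|s| s.to_string()));
    }
}

fn main() {
    let entries = vec![
        Entry { repo_path: Some(PathBuf::from("src/lib.rs")) },
        Entry { repo_path: None }, // e.g. a section header with no path
    ];
    assert_entry_paths(&entries, &[Some("src/lib.rs"), None]);
}
```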

@@ -16,7 +16,7 @@ use editor::{
use futures::StreamExt;
use git::{
Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext,
repository::{Branch, RepoPath, Upstream, UpstreamTracking, UpstreamTrackingStatus},
repository::{Branch, Upstream, UpstreamTracking, UpstreamTrackingStatus},
status::FileStatus,
};
use gpui::{
@@ -27,7 +27,7 @@ use language::{Anchor, Buffer, Capability, OffsetRangeExt};
use multi_buffer::{MultiBuffer, PathKey};
use project::{
Project, ProjectPath,
git_store::{GitStore, GitStoreEvent, Repository},
git_store::{GitStore, GitStoreEvent},
};
use settings::{Settings, SettingsStore};
use std::any::{Any, TypeId};
@@ -73,9 +73,9 @@ struct DiffBuffer {
file_status: FileStatus,
}

const CONFLICT_SORT_PREFIX: u64 = 1;
const TRACKED_SORT_PREFIX: u64 = 2;
const NEW_SORT_PREFIX: u64 = 3;
const CONFLICT_NAMESPACE: u64 = 1;
const TRACKED_NAMESPACE: u64 = 2;
const NEW_NAMESPACE: u64 = 3;

impl ProjectDiff {
pub(crate) fn register(workspace: &mut Workspace, cx: &mut Context<Workspace>) {
@@ -234,8 +234,16 @@ impl ProjectDiff {
return;
};
let repo = git_repo.read(cx);
let sort_prefix = sort_prefix(repo, &entry.repo_path, entry.status, cx);
let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0);

let namespace = if repo.had_conflict_on_last_merge_head_change(&entry.repo_path) {
CONFLICT_NAMESPACE
} else if entry.status.is_created() {
NEW_NAMESPACE
} else {
TRACKED_NAMESPACE
};

let path_key = PathKey::namespaced(namespace, entry.repo_path.0);

self.move_to_path(path_key, window, cx)
}
@@ -380,8 +388,16 @@ impl ProjectDiff {
else {
continue;
};
let sort_prefix = sort_prefix(repo, &entry.repo_path, entry.status, cx);
let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0.clone());
let namespace = if GitPanelSettings::get_global(cx).sort_by_path {
TRACKED_NAMESPACE
} else if repo.had_conflict_on_last_merge_head_change(&entry.repo_path) {
CONFLICT_NAMESPACE
} else if entry.status.is_created() {
NEW_NAMESPACE
} else {
TRACKED_NAMESPACE
};
let path_key = PathKey::namespaced(namespace, entry.repo_path.0.clone());

previous_paths.remove(&path_key);
let load_buffer = self
@@ -525,18 +541,6 @@ impl ProjectDiff {
}
}

fn sort_prefix(repo: &Repository, repo_path: &RepoPath, status: FileStatus, cx: &App) -> u64 {
if GitPanelSettings::get_global(cx).sort_by_path {
TRACKED_SORT_PREFIX
} else if repo.had_conflict_on_last_merge_head_change(repo_path) {
CONFLICT_SORT_PREFIX
} else if status.is_created() {
NEW_SORT_PREFIX
} else {
TRACKED_SORT_PREFIX
}
}
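
The hunks above trade the numeric sort-prefix constants for namespaced `PathKey`s, but the grouping decision itself stays the same: conflicted files first, then tracked files, then newly created ones, with everything collapsing into the tracked group when `sort_by_path` is enabled. The following is a minimal, std-only sketch of that decision; `EntryInfo`, its fields, and `group_for` are simplified stand-ins for the real repository, status, and settings types, not the actual Zed API:

```rust
// Std-only sketch of the grouping rule used when building PathKeys above.
// The constants mirror CONFLICT_NAMESPACE / TRACKED_NAMESPACE / NEW_NAMESPACE.
const CONFLICT_GROUP: u64 = 1;
const TRACKED_GROUP: u64 = 2;
const NEW_GROUP: u64 = 3;

// Simplified stand-in for the status/conflict information read from the repo.
struct EntryInfo {
    is_created: bool,
    is_conflicted: bool,
}

fn group_for(entry: &EntryInfo, sort_by_path: bool) -> u64 {
    if sort_by_path {
        // With sort_by_path enabled there is a single, path-ordered group.
        TRACKED_GROUP
    } else if entry.is_conflicted {
        CONFLICT_GROUP
    } else if entry.is_created {
        NEW_GROUP
    } else {
        TRACKED_GROUP
    }
}

fn main() {
    let conflict = EntryInfo { is_created: false, is_conflicted: true };
    let new_file = EntryInfo { is_created: true, is_conflicted: false };
    let tracked = EntryInfo { is_created: false, is_conflicted: false };

    assert_eq!(group_for(&conflict, false), CONFLICT_GROUP);
    assert_eq!(group_for(&new_file, false), NEW_GROUP);
    assert_eq!(group_for(&tracked, false), TRACKED_GROUP);
    // All entries fall into the same group when sorting purely by path.
    assert_eq!(group_for(&new_file, true), TRACKED_GROUP);
}
```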

impl EventEmitter<EditorEvent> for ProjectDiff {}

impl Focusable for ProjectDiff {
@@ -1459,7 +1463,7 @@ mod tests {

let editor = cx.update_window_entity(&diff, |diff, window, cx| {
diff.move_to_path(
PathKey::with_sort_prefix(TRACKED_SORT_PREFIX, rel_path("foo").into_arc()),
PathKey::namespaced(TRACKED_NAMESPACE, rel_path("foo").into_arc()),
window,
cx,
);
@@ -1480,7 +1484,7 @@ mod tests {

let editor = cx.update_window_entity(&diff, |diff, window, cx| {
diff.move_to_path(
PathKey::with_sort_prefix(TRACKED_SORT_PREFIX, rel_path("bar").into_arc()),
PathKey::namespaced(TRACKED_NAMESPACE, rel_path("bar").into_arc()),
window,
cx,
);
@@ -1619,13 +1623,14 @@ mod tests {
project_diff::{self, ProjectDiff},
};

#[cfg_attr(windows, ignore = "currently fails on windows")]
#[gpui::test]
async fn test_go_to_prev_hunk_multibuffer(cx: &mut TestAppContext) {
init_test(cx);

let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/a"),
"/a",
json!({
".git": {},
"a.txt": "created\n",
@@ -1636,7 +1641,7 @@ mod tests {
.await;

fs.set_head_and_index_for_repo(
Path::new(path!("/a/.git")),
Path::new("/a/.git"),
&[
("b.txt", "before\n".to_string()),
("c.txt", "unchanged\n".to_string()),
@@ -1644,7 +1649,7 @@ mod tests {
],
);

let project = Project::test(fs, [Path::new(path!("/a"))], cx).await;
let project = Project::test(fs, [Path::new("/a")], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));

@@ -1706,6 +1711,7 @@ mod tests {
));
}

#[cfg_attr(windows, ignore = "currently fails on windows")]
#[gpui::test]
async fn test_excerpts_splitting_after_restoring_the_middle_excerpt(cx: &mut TestAppContext) {
init_test(cx);
@@ -1745,7 +1751,7 @@ mod tests {

let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/a"),
"/a",
json!({
".git": {},
"main.rs": buffer_contents,
@@ -1754,11 +1760,11 @@ mod tests {
.await;

fs.set_head_and_index_for_repo(
Path::new(path!("/a/.git")),
Path::new("/a/.git"),
&[("main.rs", git_contents.to_owned())],
);

let project = Project::test(fs, [Path::new(path!("/a"))], cx).await;
let project = Project::test(fs, [Path::new("/a")], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));

@@ -1923,7 +1929,6 @@ mod tests {
cx.run_until_parked();

let editor = diff.read_with(cx, |diff, _| diff.editor.clone());

assert_state_with_diff(
&editor,
cx,

@@ -1,6 +1,6 @@
use gpui::{App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity};
use itertools::Itertools;
use picker::{Picker, PickerDelegate, PickerEditorPosition};
use picker::{Picker, PickerDelegate};
use project::{Project, git_store::Repository};
use std::sync::Arc;
use ui::{ListItem, ListItemSpacing, prelude::*};
@@ -36,11 +36,11 @@ impl RepositorySelector {
) -> Self {
let git_store = project_handle.read(cx).git_store().clone();
let repository_entries = git_store.update(cx, |git_store, _cx| {
let mut repos: Vec<_> = git_store.repositories().values().cloned().collect();

repos.sort_by_key(|a| a.read(_cx).display_name());

repos
git_store
.repositories()
.values()
.cloned()
.collect::<Vec<_>>()
});
let filtered_repositories = repository_entries.clone();

@@ -59,7 +59,7 @@ impl RepositorySelector {
};

let picker = cx.new(|cx| {
Picker::uniform_list(delegate, window, cx)
Picker::nonsearchable_uniform_list(delegate, window, cx)
.widest_item(widest_item_ix)
.max_height(Some(rems(20.).into()))
});
@@ -158,10 +158,6 @@ impl PickerDelegate for RepositorySelectorDelegate {
"Select a repository...".into()
}

fn editor_position(&self) -> PickerEditorPosition {
PickerEditorPosition::End
}

fn update_matches(
&mut self,
query: String,
@@ -170,31 +166,25 @@ impl PickerDelegate for RepositorySelectorDelegate {
) -> Task<()> {
let all_repositories = self.repository_entries.clone();

let repo_names: Vec<(Entity<Repository>, String)> = all_repositories
.iter()
.map(|repo| (repo.clone(), repo.read(cx).display_name().to_lowercase()))
.collect();

cx.spawn_in(window, async move |this, cx| {
let filtered_repositories = cx
.background_spawn(async move {
if query.is_empty() {
all_repositories
} else {
let query_lower = query.to_lowercase();
repo_names
all_repositories
.into_iter()
.filter(|(_, display_name)| display_name.contains(&query_lower))
.map(|(repo, _)| repo)
.filter(|_repo_info| {
// TODO: Implement repository filtering logic
true
})
.collect()
}
})
.await;

this.update_in(cx, |this, window, cx| {
let mut sorted_repositories = filtered_repositories;
sorted_repositories.sort_by_key(|a| a.read(cx).display_name());
this.delegate.filtered_repositories = sorted_repositories;
this.delegate.filtered_repositories = filtered_repositories;
this.delegate.set_selected_index(0, window, cx);
cx.notify();
})
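
One side of the `update_matches` hunk above filters repositories by a lowercased substring match against their display names before handing the result back to the delegate. Below is a minimal, std-only sketch of that filtering step; `RepoId` and the `(id, name)` pairs are hypothetical stand-ins for the `Entity<Repository>` handles used in the real code:

```rust
// Std-only sketch of lowercase-substring filtering over (id, display name)
// pairs, mirroring the shape of the filtering seen in `update_matches`.
type RepoId = usize; // hypothetical stand-in for Entity<Repository>

fn filter_repositories(repos: Vec<(RepoId, String)>, query: &str) -> Vec<RepoId> {
    if query.is_empty() {
        // No query: keep everything, in the original order.
        return repos.into_iter().map(|(id, _)| id).collect();
    }
    let query_lower = query.to_lowercase();
    repos
        .into_iter()
        // Case-insensitive substring match against the display name.
        .filter(|(_, name)| name.to_lowercase().contains(&query_lower))
        .map(|(id, _)| id)
        .collect()
}

fn main() {
    let repos = vec![
        (0, "zed".to_string()),
        (1, "Zed-Extensions".to_string()),
        (2, "gpui".to_string()),
    ];
    assert_eq!(filter_repositories(repos.clone(), ""), vec![0, 1, 2]);
    assert_eq!(filter_repositories(repos.clone(), "zed"), vec![0, 1]);
    assert_eq!(filter_repositories(repos, "ui"), vec![2]);
}
```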

Some files were not shown because too many files have changed in this diff