Compare commits
153 Commits
rayon-over
...
slash-comm
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
aca84668b0 | ||
|
|
4fe31d7f40 | ||
|
|
9ae3de2732 | ||
|
|
21e75b8221 | ||
|
|
978951b79a | ||
|
|
6b980ecad3 | ||
|
|
d9c7f44b0b | ||
|
|
55e68553a4 | ||
|
|
9fe46dc8d2 | ||
|
|
aced13bc9f | ||
|
|
2859cbdba9 | ||
|
|
4443f61c16 | ||
|
|
f0f0beb42f | ||
|
|
6707ff3b50 | ||
|
|
93770e8314 | ||
|
|
f8c617303a | ||
|
|
e5f05a21ce | ||
|
|
f499504b13 | ||
|
|
504216cbbf | ||
|
|
3bf71c690f | ||
|
|
456ba32ea7 | ||
|
|
9aeb617a89 | ||
|
|
fd8bae9b72 | ||
|
|
f71c9122ca | ||
|
|
8441aa49b2 | ||
|
|
7b96e1cf1a | ||
|
|
86322a186f | ||
|
|
1b94d74dc3 | ||
|
|
db825c1141 | ||
|
|
f3abd1dab5 | ||
|
|
662ec9977f | ||
|
|
3ab5103de1 | ||
|
|
39bd03b92d | ||
|
|
1fffcb99ba | ||
|
|
e4f90b5da2 | ||
|
|
dc6fad9659 | ||
|
|
64c289a9a2 | ||
|
|
a08897ff30 | ||
|
|
d359a814f8 | ||
|
|
4c35274b6e | ||
|
|
bf48a95344 | ||
|
|
7c3a21f732 | ||
|
|
af630be7ca | ||
|
|
dbd8efe129 | ||
|
|
3afbe836a1 | ||
|
|
d8709f2107 | ||
|
|
df7bc8200d | ||
|
|
8575972a07 | ||
|
|
40c417f9c3 | ||
|
|
7c2cf86dd9 | ||
|
|
126ed6fbdd | ||
|
|
6f4381b39d | ||
|
|
6fbbdb3512 | ||
|
|
179fb21778 | ||
|
|
6584fb23e3 | ||
|
|
d8698dffe3 | ||
|
|
bf44dc5ff5 | ||
|
|
d85b6a1544 | ||
|
|
702e618bba | ||
|
|
1029d3c301 | ||
|
|
97f552876c | ||
|
|
63c081d456 | ||
|
|
6970ab2040 | ||
|
|
e42dfb4387 | ||
|
|
ec202a26c8 | ||
|
|
f17096879c | ||
|
|
fb343a7743 | ||
|
|
a49b2d5bf8 | ||
|
|
b5d57598b6 | ||
|
|
b9d9602074 | ||
|
|
cc19f66ee1 | ||
|
|
62f90fec77 | ||
|
|
86ebb1890d | ||
|
|
dd5099ac28 | ||
|
|
c95b88d546 | ||
|
|
c217f6bd36 | ||
|
|
3314de8175 | ||
|
|
6b907bd102 | ||
|
|
3cb933ddb1 | ||
|
|
cf5362ffd1 | ||
|
|
74ac5ece6a | ||
|
|
f107708de3 | ||
|
|
4940e53d23 | ||
|
|
ab79fa440d | ||
|
|
c9b7df4113 | ||
|
|
f2df49764e | ||
|
|
77cc55656e | ||
|
|
1c85995ed7 | ||
|
|
d1543f75b6 | ||
|
|
fc0b249136 | ||
|
|
01dbc68f82 | ||
|
|
e111acad33 | ||
|
|
c61409e577 | ||
|
|
1659fb81e7 | ||
|
|
dd6c653fe9 | ||
|
|
a13e84a108 | ||
|
|
1cac3e3e40 | ||
|
|
9abe5811a5 | ||
|
|
97bd2846e9 | ||
|
|
e9244d50a7 | ||
|
|
83e5a3033e | ||
|
|
94a4c0c352 | ||
|
|
0f8693386a | ||
|
|
ed269b4467 | ||
|
|
34ddf5466f | ||
|
|
a701388cb7 | ||
|
|
29afc0412e | ||
|
|
e65a9291ef | ||
|
|
a53faff412 | ||
|
|
074cb88036 | ||
|
|
67ebb1f795 | ||
|
|
ace617037f | ||
|
|
43061b6b16 | ||
|
|
e23e976e58 | ||
|
|
0266a995aa | ||
|
|
9741e9ab8b | ||
|
|
3f31fc2874 | ||
|
|
6c50fd6de9 | ||
|
|
df43a2d3b1 | ||
|
|
35749e99e5 | ||
|
|
e965c43703 | ||
|
|
14fc726cae | ||
|
|
4f95186b53 | ||
|
|
33f44009de | ||
|
|
9d895c5ea7 | ||
|
|
0811d48a7a | ||
|
|
d8cafdf937 | ||
|
|
95190a2034 | ||
|
|
49335d54be | ||
|
|
624e448492 | ||
|
|
bf9dd6bbef | ||
|
|
6af385235d | ||
|
|
cc19387853 | ||
|
|
5922f4adce | ||
|
|
cac920d992 | ||
|
|
773850f477 | ||
|
|
9c60bc3837 | ||
|
|
fbb4dcf2b1 | ||
|
|
2ccadc7f65 | ||
|
|
80989d6767 | ||
|
|
719013dae6 | ||
|
|
8af3f583c2 | ||
|
|
f1d80b715a | ||
|
|
42ef3e5d3d | ||
|
|
90ea252c82 | ||
|
|
6e5ff6d091 | ||
|
|
04216a88f3 | ||
|
|
3ae65153db | ||
|
|
ffc9060607 | ||
|
|
4fc4707cfc | ||
|
|
8662025d12 | ||
|
|
ceddd5752a | ||
|
|
20166727a6 |
@@ -37,8 +37,6 @@ workspace-members = [
|
||||
"zed_glsl",
|
||||
"zed_html",
|
||||
"zed_proto",
|
||||
"zed_ruff",
|
||||
"slash_commands_example",
|
||||
"zed_snippets",
|
||||
"zed_test_extension",
|
||||
]
|
||||
|
||||
8
.github/workflows/ci.yml
vendored
8
.github/workflows/ci.yml
vendored
@@ -826,8 +826,9 @@ jobs:
|
||||
timeout-minutes: 120
|
||||
name: Create a Windows installer
|
||||
runs-on: [self-32vcpu-windows-2022]
|
||||
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
# if: (startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling'))
|
||||
if: |
|
||||
( startsWith(github.ref, 'refs/tags/v')
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling') )
|
||||
needs: [windows_tests]
|
||||
env:
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
|
||||
@@ -870,8 +871,7 @@ jobs:
|
||||
|
||||
- name: Upload Artifacts to release
|
||||
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
|
||||
# Re-enable when we are ready to publish windows preview releases
|
||||
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) && env.RELEASE_CHANNEL == 'preview' }} # upload only preview
|
||||
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
|
||||
with:
|
||||
draft: true
|
||||
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
|
||||
|
||||
@@ -39,7 +39,7 @@ jobs:
|
||||
content: ${{ steps.get-content.outputs.string }}
|
||||
|
||||
send_release_notes_email:
|
||||
if: github.repository_owner == 'zed-industries' && !github.event.release.prerelease
|
||||
if: false && github.repository_owner == 'zed-industries' && !github.event.release.prerelease
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
|
||||
33
.github/workflows/issue_response.yml
vendored
33
.github/workflows/issue_response.yml
vendored
@@ -1,33 +0,0 @@
|
||||
name: Issue Response
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 12 * * 2"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
issue-response:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
|
||||
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
|
||||
with:
|
||||
version: 9
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
with:
|
||||
node-version: "20"
|
||||
cache: "pnpm"
|
||||
cache-dependency-path: "script/issue_response/pnpm-lock.yaml"
|
||||
|
||||
- run: pnpm install --dir script/issue_response
|
||||
|
||||
- name: Run Issue Response
|
||||
run: pnpm run --dir script/issue_response start
|
||||
env:
|
||||
ISSUE_RESPONSE_GITHUB_TOKEN: ${{ secrets.ISSUE_RESPONSE_GITHUB_TOKEN }}
|
||||
SLACK_ISSUE_RESPONSE_WEBHOOK_URL: ${{ secrets.SLACK_ISSUE_RESPONSE_WEBHOOK_URL }}
|
||||
129
Cargo.lock
generated
129
Cargo.lock
generated
@@ -318,9 +318,11 @@ dependencies = [
|
||||
"smol",
|
||||
"task",
|
||||
"tempfile",
|
||||
"terminal",
|
||||
"thiserror 2.0.12",
|
||||
"ui",
|
||||
"util",
|
||||
"uuid",
|
||||
"watch",
|
||||
"workspace-hack",
|
||||
]
|
||||
@@ -419,7 +421,6 @@ dependencies = [
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"settings",
|
||||
"shlex",
|
||||
"smol",
|
||||
"streaming_diff",
|
||||
"task",
|
||||
@@ -516,7 +517,7 @@ dependencies = [
|
||||
"rustix-openpty",
|
||||
"serde",
|
||||
"signal-hook",
|
||||
"unicode-width 0.2.0",
|
||||
"unicode-width",
|
||||
"vte",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
@@ -1411,7 +1412,6 @@ dependencies = [
|
||||
"log",
|
||||
"parking_lot",
|
||||
"rodio",
|
||||
"rubato",
|
||||
"serde",
|
||||
"settings",
|
||||
"smol",
|
||||
@@ -2308,14 +2308,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "blade-graphics"
|
||||
version = "0.6.0"
|
||||
source = "git+https://github.com/kvark/blade?rev=bfa594ea697d4b6326ea29f747525c85ecf933b9#bfa594ea697d4b6326ea29f747525c85ecf933b9"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e4deb8f595ce7f00dee3543ebf6fd9a20ea86fc421ab79600dac30876250bdae"
|
||||
dependencies = [
|
||||
"ash",
|
||||
"ash-window",
|
||||
"bitflags 2.9.0",
|
||||
"bytemuck",
|
||||
"codespan-reporting 0.11.1",
|
||||
"codespan-reporting",
|
||||
"glow",
|
||||
"gpu-alloc",
|
||||
"gpu-alloc-ash",
|
||||
@@ -2333,6 +2334,7 @@ dependencies = [
|
||||
"objc2-metal",
|
||||
"objc2-quartz-core",
|
||||
"objc2-ui-kit",
|
||||
"once_cell",
|
||||
"raw-window-handle",
|
||||
"slab",
|
||||
"wasm-bindgen",
|
||||
@@ -2342,7 +2344,8 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "blade-macros"
|
||||
version = "0.3.0"
|
||||
source = "git+https://github.com/kvark/blade?rev=bfa594ea697d4b6326ea29f747525c85ecf933b9#bfa594ea697d4b6326ea29f747525c85ecf933b9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "27142319e2f4c264581067eaccb9f80acccdde60d8b4bf57cc50cd3152f109ca"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2351,8 +2354,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "blade-util"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/kvark/blade?rev=bfa594ea697d4b6326ea29f747525c85ecf933b9#bfa594ea697d4b6326ea29f747525c85ecf933b9"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3a6be3a82c001ba7a17b6f8e413ede5d1004e6047213f8efaf0ffc15b5c4904c"
|
||||
dependencies = [
|
||||
"blade-graphics",
|
||||
"bytemuck",
|
||||
@@ -3081,6 +3085,7 @@ name = "cli"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"askpass",
|
||||
"clap",
|
||||
"collections",
|
||||
"core-foundation 0.10.0",
|
||||
@@ -3217,6 +3222,7 @@ dependencies = [
|
||||
"indoc",
|
||||
"ordered-float 2.10.1",
|
||||
"rustc-hash 2.1.1",
|
||||
"serde",
|
||||
"strum 0.27.1",
|
||||
"workspace-hack",
|
||||
]
|
||||
@@ -3296,16 +3302,6 @@ dependencies = [
|
||||
"objc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codespan-reporting"
|
||||
version = "0.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
|
||||
dependencies = [
|
||||
"termcolor",
|
||||
"unicode-width 0.1.14",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codespan-reporting"
|
||||
version = "0.12.0"
|
||||
@@ -3314,7 +3310,7 @@ checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"termcolor",
|
||||
"unicode-width 0.2.0",
|
||||
"unicode-width",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3390,7 +3386,6 @@ dependencies = [
|
||||
"reqwest 0.11.27",
|
||||
"reqwest_client",
|
||||
"rpc",
|
||||
"rustc-demangle",
|
||||
"scrypt",
|
||||
"sea-orm",
|
||||
"semantic_version",
|
||||
@@ -3583,7 +3578,7 @@ dependencies = [
|
||||
"encode_unicode",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"unicode-width 0.2.0",
|
||||
"unicode-width",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
@@ -4112,9 +4107,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "crc"
|
||||
version = "3.2.1"
|
||||
version = "3.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636"
|
||||
checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675"
|
||||
dependencies = [
|
||||
"crc-catalog",
|
||||
]
|
||||
@@ -4373,7 +4368,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b4400e26ea4b99417e4263b1ce2d8452404d750ba0809a7bd043072593d430d"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"codespan-reporting 0.12.0",
|
||||
"codespan-reporting",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"scratch",
|
||||
@@ -4387,7 +4382,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "31860c98f69fc14da5742c5deaf78983e846c7b27804ca8c8319e32eef421bde"
|
||||
dependencies = [
|
||||
"clap",
|
||||
"codespan-reporting 0.12.0",
|
||||
"codespan-reporting",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.101",
|
||||
@@ -5178,6 +5173,7 @@ dependencies = [
|
||||
"language",
|
||||
"log",
|
||||
"ordered-float 2.10.1",
|
||||
"postage",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"regex",
|
||||
@@ -6943,9 +6939,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "glow"
|
||||
version = "0.14.2"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d51fa363f025f5c111e03f13eda21162faeacb6911fe8caa0c0349f9cf0c4483"
|
||||
checksum = "c5e5ea60d70410161c8bf5da3fdfeaa1c72ed2c15f8bbb9d19fe3a4fad085f08"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"slotmap",
|
||||
@@ -7563,6 +7559,9 @@ name = "http_client"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-compression",
|
||||
"async-fs",
|
||||
"async-tar",
|
||||
"bytes 1.10.1",
|
||||
"derive_more",
|
||||
"futures 0.3.31",
|
||||
@@ -7573,7 +7572,10 @@ dependencies = [
|
||||
"reqwest 0.12.15 (git+https://github.com/zed-industries/reqwest.git?rev=951c770a32f1998d6e999cef3e59e0013e6c4415)",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
"tempfile",
|
||||
"url",
|
||||
"util",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
@@ -8844,11 +8846,9 @@ dependencies = [
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"settings",
|
||||
"sha2",
|
||||
"shlex",
|
||||
"smol",
|
||||
"task",
|
||||
"tempfile",
|
||||
"text",
|
||||
"theme",
|
||||
"toml 0.8.20",
|
||||
@@ -9694,9 +9694,13 @@ dependencies = [
|
||||
"convert_case 0.8.0",
|
||||
"log",
|
||||
"pretty_assertions",
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"settings",
|
||||
"streaming-iterator",
|
||||
"tree-sitter",
|
||||
"tree-sitter-json",
|
||||
"unindent",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
@@ -9927,7 +9931,7 @@ dependencies = [
|
||||
"bit-set 0.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"cfg_aliases 0.2.1",
|
||||
"codespan-reporting 0.12.0",
|
||||
"codespan-reporting",
|
||||
"half",
|
||||
"hashbrown 0.15.3",
|
||||
"hexf-parse",
|
||||
@@ -10636,6 +10640,7 @@ dependencies = [
|
||||
"telemetry",
|
||||
"theme",
|
||||
"ui",
|
||||
"ui_input",
|
||||
"util",
|
||||
"vim_mode_setting",
|
||||
"workspace",
|
||||
@@ -12099,6 +12104,7 @@ dependencies = [
|
||||
"dap_adapters",
|
||||
"extension",
|
||||
"fancy-regex 0.14.0",
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"fuzzy",
|
||||
@@ -12163,6 +12169,7 @@ dependencies = [
|
||||
"client",
|
||||
"collections",
|
||||
"command_palette_hooks",
|
||||
"criterion",
|
||||
"db",
|
||||
"editor",
|
||||
"file_icons",
|
||||
@@ -12173,6 +12180,7 @@ dependencies = [
|
||||
"menu",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"rayon",
|
||||
"schemars 1.0.1",
|
||||
"search",
|
||||
"serde",
|
||||
@@ -13511,18 +13519,6 @@ version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad8388ea1a9e0ea807e442e8263a699e7edcb320ecbcd21b4fa8ff859acce3ba"
|
||||
|
||||
[[package]]
|
||||
name = "rubato"
|
||||
version = "0.16.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5258099699851cfd0082aeb645feb9c084d9a5e1f1b8d5372086b989fc5e56a1"
|
||||
dependencies = [
|
||||
"num-complex",
|
||||
"num-integer",
|
||||
"num-traits",
|
||||
"realfft",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rules_library"
|
||||
version = "0.1.0"
|
||||
@@ -13612,9 +13608,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rust_decimal"
|
||||
version = "1.37.1"
|
||||
version = "1.38.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "faa7de2ba56ac291bd90c6b9bece784a52ae1411f9506544b3eae36dd2356d50"
|
||||
checksum = "c8975fc98059f365204d635119cf9c5a60ae67b841ed49b5422a9a7e56cdfac0"
|
||||
dependencies = [
|
||||
"arrayvec",
|
||||
"borsh",
|
||||
@@ -14549,11 +14545,13 @@ dependencies = [
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
"language",
|
||||
"menu",
|
||||
"node_runtime",
|
||||
"paths",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"serde",
|
||||
"session",
|
||||
@@ -14586,9 +14584,9 @@ checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d"
|
||||
|
||||
[[package]]
|
||||
name = "sha2"
|
||||
version = "0.10.8"
|
||||
version = "0.10.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
|
||||
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cpufeatures",
|
||||
@@ -16915,9 +16913,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter"
|
||||
version = "0.25.6"
|
||||
version = "0.25.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a7cf18d43cbf0bfca51f657132cc616a5097edc4424d538bae6fa60142eaf9f0"
|
||||
checksum = "78f873475d258561b06f1c595d93308a7ed124d9977cb26b148c2084a4a3cc87"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"regex",
|
||||
@@ -17328,6 +17326,7 @@ dependencies = [
|
||||
"component",
|
||||
"editor",
|
||||
"gpui",
|
||||
"menu",
|
||||
"settings",
|
||||
"theme",
|
||||
"ui",
|
||||
@@ -17440,12 +17439,6 @@ version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b1d386ff53b415b7fe27b50bb44679e2cc4660272694b7b6f3326d8480823a94"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-width"
|
||||
version = "0.1.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-width"
|
||||
version = "0.2.0"
|
||||
@@ -17720,6 +17713,7 @@ dependencies = [
|
||||
"language",
|
||||
"log",
|
||||
"lsp",
|
||||
"menu",
|
||||
"multi_buffer",
|
||||
"nvim-rs",
|
||||
"parking_lot",
|
||||
@@ -19697,12 +19691,11 @@ dependencies = [
|
||||
"cipher",
|
||||
"clap",
|
||||
"clap_builder",
|
||||
"codespan-reporting 0.12.0",
|
||||
"codespan-reporting",
|
||||
"concurrent-queue",
|
||||
"core-foundation 0.9.4",
|
||||
"core-foundation-sys",
|
||||
"cranelift-codegen",
|
||||
"crc32fast",
|
||||
"crossbeam-channel",
|
||||
"crossbeam-epoch",
|
||||
"crossbeam-utils",
|
||||
@@ -20217,7 +20210,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.207.0"
|
||||
version = "0.208.0"
|
||||
dependencies = [
|
||||
"acp_tools",
|
||||
"activity_indicator",
|
||||
@@ -20226,7 +20219,6 @@ dependencies = [
|
||||
"agent_ui",
|
||||
"anyhow",
|
||||
"ashpd 0.11.0",
|
||||
"askpass",
|
||||
"assets",
|
||||
"assistant_tools",
|
||||
"audio",
|
||||
@@ -20352,7 +20344,6 @@ dependencies = [
|
||||
"url",
|
||||
"urlencoding",
|
||||
"util",
|
||||
"util_macros",
|
||||
"uuid",
|
||||
"vim",
|
||||
"vim_mode_setting",
|
||||
@@ -20443,21 +20434,6 @@ dependencies = [
|
||||
"zed_extension_api 0.1.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zed_ruff"
|
||||
version = "0.1.1"
|
||||
dependencies = [
|
||||
"zed_extension_api 0.1.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zed_snippets"
|
||||
version = "0.0.6"
|
||||
dependencies = [
|
||||
"serde_json",
|
||||
"zed_extension_api 0.1.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zed_test_extension"
|
||||
version = "0.1.0"
|
||||
@@ -20741,7 +20717,9 @@ dependencies = [
|
||||
"language_model",
|
||||
"language_models",
|
||||
"languages",
|
||||
"log",
|
||||
"node_runtime",
|
||||
"ordered-float 2.10.1",
|
||||
"paths",
|
||||
"project",
|
||||
"prompt_store",
|
||||
@@ -20758,6 +20736,7 @@ dependencies = [
|
||||
"workspace-hack",
|
||||
"zeta",
|
||||
"zeta2",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
12
Cargo.toml
12
Cargo.toml
@@ -212,9 +212,7 @@ members = [
|
||||
"extensions/glsl",
|
||||
"extensions/html",
|
||||
"extensions/proto",
|
||||
"extensions/ruff",
|
||||
"extensions/slash-commands-example",
|
||||
"extensions/snippets",
|
||||
"extensions/test-extension",
|
||||
|
||||
#
|
||||
@@ -474,9 +472,9 @@ backtrace = "0.3"
|
||||
base64 = "0.22"
|
||||
bincode = "1.2.1"
|
||||
bitflags = "2.6.0"
|
||||
blade-graphics = { git = "https://github.com/kvark/blade", rev = "bfa594ea697d4b6326ea29f747525c85ecf933b9" }
|
||||
blade-macros = { git = "https://github.com/kvark/blade", rev = "bfa594ea697d4b6326ea29f747525c85ecf933b9" }
|
||||
blade-util = { git = "https://github.com/kvark/blade", rev = "bfa594ea697d4b6326ea29f747525c85ecf933b9" }
|
||||
blade-graphics = { version = "0.7.0" }
|
||||
blade-macros = { version = "0.3.0" }
|
||||
blade-util = { version = "0.3.0" }
|
||||
blake3 = "1.5.3"
|
||||
bytes = "1.0"
|
||||
cargo_metadata = "0.19"
|
||||
@@ -550,6 +548,7 @@ nanoid = "0.4"
|
||||
nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
|
||||
nix = "0.29"
|
||||
num-format = "0.4.4"
|
||||
num-traits = "0.2"
|
||||
objc = "0.2"
|
||||
objc2-foundation = { version = "0.3", default-features = false, features = [
|
||||
"NSArray",
|
||||
@@ -620,7 +619,6 @@ runtimelib = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804
|
||||
"async-dispatcher-runtime",
|
||||
] }
|
||||
rust-embed = { version = "8.4", features = ["include-exclude"] }
|
||||
rustc-demangle = "0.1.23"
|
||||
rustc-hash = "2.1.0"
|
||||
rustls = { version = "0.23.26" }
|
||||
rustls-platform-verifier = "0.5.0"
|
||||
@@ -669,7 +667,7 @@ tokio = { version = "1" }
|
||||
tokio-tungstenite = { version = "0.26", features = ["__rustls-tls"] }
|
||||
toml = "0.8"
|
||||
tower-http = "0.4.4"
|
||||
tree-sitter = { version = "0.25.6", features = ["wasm"] }
|
||||
tree-sitter = { version = "0.25.10", features = ["wasm"] }
|
||||
tree-sitter-bash = "0.25.0"
|
||||
tree-sitter-c = "0.23"
|
||||
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "5cb9b693cfd7bfacab1d9ff4acac1a4150700609" }
|
||||
|
||||
3
assets/icons/paperclip.svg
Normal file
3
assets/icons/paperclip.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M10.1645 4.45825L5.20344 9.52074C4.98225 9.74193 4.85798 10.0419 4.85798 10.3548C4.85798 10.6676 4.98225 10.9676 5.20344 11.1888C5.42464 11.41 5.72464 11.5342 6.03746 11.5342C6.35028 11.5342 6.65028 11.41 6.87148 11.1888L11.8326 6.12629C12.2749 5.68397 12.5234 5.08407 12.5234 4.45854C12.5234 3.83302 12.2749 3.23311 11.8326 2.7908C11.3902 2.34849 10.7903 2.1 10.1648 2.1C9.53928 2.1 8.93938 2.34849 8.49707 2.7908L3.55663 7.83265C3.22373 8.16017 2.95897 8.55037 2.77762 8.98072C2.59628 9.41108 2.50193 9.87308 2.50003 10.3401C2.49813 10.8071 2.58871 11.2698 2.76654 11.7017C2.94438 12.1335 3.20595 12.5258 3.53618 12.856C3.8664 13.1863 4.25873 13.4478 4.69055 13.6257C5.12237 13.8035 5.58513 13.8941 6.05213 13.8922C6.51913 13.8903 6.98114 13.7959 7.41149 13.6146C7.84185 13.4332 8.23204 13.1685 8.55957 12.8356L13.5 7.79373" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.0 KiB |
@@ -250,7 +250,7 @@
|
||||
"alt-enter": "agent::ContinueWithBurnMode",
|
||||
"ctrl-y": "agent::AllowOnce",
|
||||
"ctrl-alt-y": "agent::AllowAlways",
|
||||
"ctrl-d": "agent::RejectOnce"
|
||||
"ctrl-alt-z": "agent::RejectOnce"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -289,7 +289,7 @@
|
||||
"alt-enter": "agent::ContinueWithBurnMode",
|
||||
"cmd-y": "agent::AllowOnce",
|
||||
"cmd-alt-y": "agent::AllowAlways",
|
||||
"cmd-d": "agent::RejectOnce"
|
||||
"cmd-alt-z": "agent::RejectOnce"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -251,7 +251,7 @@
|
||||
"alt-enter": "agent::ContinueWithBurnMode",
|
||||
"ctrl-y": "agent::AllowOnce",
|
||||
"ctrl-alt-y": "agent::AllowAlways",
|
||||
"ctrl-d": "agent::RejectOnce"
|
||||
"ctrl-alt-z": "agent::RejectOnce"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -240,6 +240,7 @@
|
||||
"delete": "vim::DeleteRight",
|
||||
"g shift-j": "vim::JoinLinesNoWhitespace",
|
||||
"y": "vim::PushYank",
|
||||
"shift-y": "vim::YankToEndOfLine",
|
||||
"x": "vim::DeleteRight",
|
||||
"shift-x": "vim::DeleteLeft",
|
||||
"ctrl-a": "vim::Increment",
|
||||
@@ -392,7 +393,7 @@
|
||||
"escape": "editor::Cancel",
|
||||
"shift-d": "vim::DeleteToEndOfLine",
|
||||
"shift-j": "vim::JoinLines",
|
||||
"shift-y": "vim::YankLine",
|
||||
"shift-y": "vim::YankToEndOfLine",
|
||||
"shift-i": "vim::InsertFirstNonWhitespace",
|
||||
"shift-a": "vim::InsertEndOfLine",
|
||||
"o": "vim::InsertLineBelow",
|
||||
@@ -883,10 +884,12 @@
|
||||
"/": "project_panel::NewSearchInDirectory",
|
||||
"d": "project_panel::NewDirectory",
|
||||
"enter": "project_panel::OpenPermanent",
|
||||
"escape": "project_panel::ToggleFocus",
|
||||
"escape": "vim::ToggleProjectPanelFocus",
|
||||
"h": "project_panel::CollapseSelectedEntry",
|
||||
"j": "menu::SelectNext",
|
||||
"k": "menu::SelectPrevious",
|
||||
"j": "vim::MenuSelectNext",
|
||||
"k": "vim::MenuSelectPrevious",
|
||||
"down": "vim::MenuSelectNext",
|
||||
"up": "vim::MenuSelectPrevious",
|
||||
"l": "project_panel::ExpandSelectedEntry",
|
||||
"shift-d": "project_panel::Delete",
|
||||
"shift-r": "project_panel::Rename",
|
||||
@@ -905,7 +908,22 @@
|
||||
"{": "project_panel::SelectPrevDirectory",
|
||||
"shift-g": "menu::SelectLast",
|
||||
"g g": "menu::SelectFirst",
|
||||
"-": "project_panel::SelectParent"
|
||||
"-": "project_panel::SelectParent",
|
||||
"ctrl-u": "project_panel::ScrollUp",
|
||||
"ctrl-d": "project_panel::ScrollDown",
|
||||
"z t": "project_panel::ScrollCursorTop",
|
||||
"z z": "project_panel::ScrollCursorCenter",
|
||||
"z b": "project_panel::ScrollCursorBottom",
|
||||
"0": ["vim::Number", 0],
|
||||
"1": ["vim::Number", 1],
|
||||
"2": ["vim::Number", 2],
|
||||
"3": ["vim::Number", 3],
|
||||
"4": ["vim::Number", 4],
|
||||
"5": ["vim::Number", 5],
|
||||
"6": ["vim::Number", 6],
|
||||
"7": ["vim::Number", 7],
|
||||
"8": ["vim::Number", 8],
|
||||
"9": ["vim::Number", 9]
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -29,7 +29,9 @@ Generate {{content_type}} based on the following prompt:
|
||||
|
||||
Match the indentation in the original file in the inserted {{content_type}}, don't include any indentation on blank lines.
|
||||
|
||||
Immediately start with the following format with no remarks:
|
||||
Return ONLY the {{content_type}} to insert. Do NOT include any XML tags like <document>, <insert_here>, or any surrounding markup from the input.
|
||||
|
||||
Respond with a code block containing the {{content_type}} to insert. Replace \{{INSERTED_CODE}} with your actual {{content_type}}:
|
||||
|
||||
```
|
||||
\{{INSERTED_CODE}}
|
||||
@@ -66,7 +68,9 @@ Only make changes that are necessary to fulfill the prompt, leave everything els
|
||||
|
||||
Start at the indentation level in the original file in the rewritten {{content_type}}. Don't stop until you've rewritten the entire section, even if you have no more changes to make, always write out the whole section with no unnecessary elisions.
|
||||
|
||||
Immediately start with the following format with no remarks:
|
||||
Return ONLY the rewritten {{content_type}}. Do NOT include any XML tags like <document>, <rewrite_this>, or any surrounding markup from the input.
|
||||
|
||||
Respond with a code block containing the rewritten {{content_type}}. Replace \{{REWRITTEN_CODE}} with your actual rewritten {{content_type}}:
|
||||
|
||||
```
|
||||
\{{REWRITTEN_CODE}}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
{
|
||||
"project_name": null,
|
||||
/// The displayed name of this project. If not set or empty, the root directory name
|
||||
/// will be displayed.
|
||||
"project_name": "",
|
||||
// The name of the Zed theme to use for the UI.
|
||||
//
|
||||
// `mode` is one of:
|
||||
@@ -72,8 +74,10 @@
|
||||
"ui_font_weight": 400,
|
||||
// The default font size for text in the UI
|
||||
"ui_font_size": 16,
|
||||
// The default font size for text in the agent panel. Falls back to the UI font size if unset.
|
||||
"agent_font_size": null,
|
||||
// The default font size for agent responses in the agent panel. Falls back to the UI font size if unset.
|
||||
"agent_ui_font_size": null,
|
||||
// The default font size for user messages in the agent panel. Falls back to the buffer font size if unset.
|
||||
"agent_buffer_font_size": 12,
|
||||
// How much to fade out unused code.
|
||||
"unnecessary_code_fade": 0.3,
|
||||
// Active pane styling settings.
|
||||
@@ -1242,6 +1246,9 @@
|
||||
// The minimum column number to show the inline blame information at
|
||||
"min_column": 0
|
||||
},
|
||||
"blame": {
|
||||
"show_avatar": true
|
||||
},
|
||||
// Control which information is shown in the branch picker.
|
||||
"branch_picker": {
|
||||
"show_author_name": true
|
||||
@@ -1322,6 +1329,8 @@
|
||||
},
|
||||
// Status bar-related settings.
|
||||
"status_bar": {
|
||||
// Whether to show the status bar.
|
||||
"experimental.show": true,
|
||||
// Whether to show the active language button in the status bar.
|
||||
"active_language_button": true,
|
||||
// Whether to show the cursor position button in the status bar.
|
||||
@@ -1557,6 +1566,14 @@
|
||||
"auto_install_extensions": {
|
||||
"html": true
|
||||
},
|
||||
// The capabilities granted to extensions.
|
||||
//
|
||||
// This list can be customized to restrict what extensions are able to do.
|
||||
"granted_extension_capabilities": [
|
||||
{ "kind": "process:exec", "command": "*", "args": ["**"] },
|
||||
{ "kind": "download_file", "host": "*", "path": ["**"] },
|
||||
{ "kind": "npm:install", "package": "*" }
|
||||
],
|
||||
// Controls how completions are processed for this language.
|
||||
"completions": {
|
||||
// Controls how words are completed.
|
||||
@@ -2019,7 +2036,7 @@
|
||||
// Examples:
|
||||
// "profiles": {
|
||||
// "Presenting": {
|
||||
// "agent_font_size": 20.0,
|
||||
// "agent_ui_font_size": 20.0,
|
||||
// "buffer_font_size": 20.0,
|
||||
// "theme": "One Light",
|
||||
// "ui_font_size": 20.0
|
||||
|
||||
@@ -192,7 +192,7 @@
|
||||
"font_weight": null
|
||||
},
|
||||
"comment": {
|
||||
"color": "#abb5be8c",
|
||||
"color": "#5c6773ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
@@ -583,7 +583,7 @@
|
||||
"font_weight": null
|
||||
},
|
||||
"comment": {
|
||||
"color": "#787b8099",
|
||||
"color": "#abb0b6ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
@@ -630,7 +630,7 @@
|
||||
"hint": {
|
||||
"color": "#8ca7c2ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#fa8d3eff",
|
||||
@@ -974,7 +974,7 @@
|
||||
"font_weight": null
|
||||
},
|
||||
"comment": {
|
||||
"color": "#b8cfe680",
|
||||
"color": "#5c6773ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
@@ -1021,7 +1021,7 @@
|
||||
"hint": {
|
||||
"color": "#7399a3ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#ffad65ff",
|
||||
|
||||
@@ -653,7 +653,7 @@
|
||||
"hint": {
|
||||
"color": "#8c957dff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#fb4833ff",
|
||||
@@ -1058,7 +1058,7 @@
|
||||
"hint": {
|
||||
"color": "#8c957dff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#fb4833ff",
|
||||
@@ -1463,7 +1463,7 @@
|
||||
"hint": {
|
||||
"color": "#677562ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#9d0006ff",
|
||||
@@ -1868,7 +1868,7 @@
|
||||
"hint": {
|
||||
"color": "#677562ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#9d0006ff",
|
||||
@@ -2273,7 +2273,7 @@
|
||||
"hint": {
|
||||
"color": "#677562ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#9d0006ff",
|
||||
|
||||
@@ -643,7 +643,7 @@
|
||||
"hint": {
|
||||
"color": "#7274a7ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#a449abff",
|
||||
|
||||
@@ -3,6 +3,7 @@ mod diff;
|
||||
mod mention;
|
||||
mod terminal;
|
||||
|
||||
use ::terminal::terminal_settings::TerminalSettings;
|
||||
use agent_settings::AgentSettings;
|
||||
use collections::HashSet;
|
||||
pub use connection::*;
|
||||
@@ -787,6 +788,8 @@ pub struct AcpThread {
|
||||
prompt_capabilities: acp::PromptCapabilities,
|
||||
_observe_prompt_capabilities: Task<anyhow::Result<()>>,
|
||||
terminals: HashMap<acp::TerminalId, Entity<Terminal>>,
|
||||
pending_terminal_output: HashMap<acp::TerminalId, Vec<Vec<u8>>>,
|
||||
pending_terminal_exit: HashMap<acp::TerminalId, acp::TerminalExitStatus>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -809,6 +812,126 @@ pub enum AcpThreadEvent {
|
||||
|
||||
impl EventEmitter<AcpThreadEvent> for AcpThread {}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum TerminalProviderEvent {
|
||||
Created {
|
||||
terminal_id: acp::TerminalId,
|
||||
label: String,
|
||||
cwd: Option<PathBuf>,
|
||||
output_byte_limit: Option<u64>,
|
||||
terminal: Entity<::terminal::Terminal>,
|
||||
},
|
||||
Output {
|
||||
terminal_id: acp::TerminalId,
|
||||
data: Vec<u8>,
|
||||
},
|
||||
TitleChanged {
|
||||
terminal_id: acp::TerminalId,
|
||||
title: String,
|
||||
},
|
||||
Exit {
|
||||
terminal_id: acp::TerminalId,
|
||||
status: acp::TerminalExitStatus,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum TerminalProviderCommand {
|
||||
WriteInput {
|
||||
terminal_id: acp::TerminalId,
|
||||
bytes: Vec<u8>,
|
||||
},
|
||||
Resize {
|
||||
terminal_id: acp::TerminalId,
|
||||
cols: u16,
|
||||
rows: u16,
|
||||
},
|
||||
Close {
|
||||
terminal_id: acp::TerminalId,
|
||||
},
|
||||
}
|
||||
|
||||
impl AcpThread {
|
||||
pub fn on_terminal_provider_event(
|
||||
&mut self,
|
||||
event: TerminalProviderEvent,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
TerminalProviderEvent::Created {
|
||||
terminal_id,
|
||||
label,
|
||||
cwd,
|
||||
output_byte_limit,
|
||||
terminal,
|
||||
} => {
|
||||
let entity = self.register_terminal_created(
|
||||
terminal_id.clone(),
|
||||
label,
|
||||
cwd,
|
||||
output_byte_limit,
|
||||
terminal,
|
||||
cx,
|
||||
);
|
||||
|
||||
if let Some(mut chunks) = self.pending_terminal_output.remove(&terminal_id) {
|
||||
for data in chunks.drain(..) {
|
||||
entity.update(cx, |term, cx| {
|
||||
term.inner().update(cx, |inner, cx| {
|
||||
inner.write_output(&data, cx);
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(_status) = self.pending_terminal_exit.remove(&terminal_id) {
|
||||
entity.update(cx, |_term, cx| {
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
TerminalProviderEvent::Output { terminal_id, data } => {
|
||||
if let Some(entity) = self.terminals.get(&terminal_id) {
|
||||
entity.update(cx, |term, cx| {
|
||||
term.inner().update(cx, |inner, cx| {
|
||||
inner.write_output(&data, cx);
|
||||
})
|
||||
});
|
||||
} else {
|
||||
self.pending_terminal_output
|
||||
.entry(terminal_id)
|
||||
.or_default()
|
||||
.push(data);
|
||||
}
|
||||
}
|
||||
TerminalProviderEvent::TitleChanged { terminal_id, title } => {
|
||||
if let Some(entity) = self.terminals.get(&terminal_id) {
|
||||
entity.update(cx, |term, cx| {
|
||||
term.inner().update(cx, |inner, cx| {
|
||||
inner.breadcrumb_text = title;
|
||||
cx.emit(::terminal::Event::BreadcrumbsChanged);
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
TerminalProviderEvent::Exit {
|
||||
terminal_id,
|
||||
status,
|
||||
} => {
|
||||
if let Some(entity) = self.terminals.get(&terminal_id) {
|
||||
entity.update(cx, |_term, cx| {
|
||||
cx.notify();
|
||||
});
|
||||
} else {
|
||||
self.pending_terminal_exit.insert(terminal_id, status);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug)]
|
||||
pub enum ThreadStatus {
|
||||
Idle,
|
||||
@@ -886,6 +1009,8 @@ impl AcpThread {
|
||||
prompt_capabilities,
|
||||
_observe_prompt_capabilities: task,
|
||||
terminals: HashMap::default(),
|
||||
pending_terminal_output: HashMap::default(),
|
||||
pending_terminal_exit: HashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1961,16 +2086,15 @@ impl AcpThread {
|
||||
) -> Task<Result<Entity<Terminal>>> {
|
||||
let env = match &cwd {
|
||||
Some(dir) => self.project.update(cx, |project, cx| {
|
||||
project.directory_environment(dir.as_path().into(), cx)
|
||||
let shell = TerminalSettings::get_global(cx).shell.clone();
|
||||
project.directory_environment(&shell, dir.as_path().into(), cx)
|
||||
}),
|
||||
None => Task::ready(None).shared(),
|
||||
};
|
||||
|
||||
let env = cx.spawn(async move |_, _| {
|
||||
let mut env = env.await.unwrap_or_default();
|
||||
if cfg!(unix) {
|
||||
env.insert("PAGER".into(), "cat".into());
|
||||
}
|
||||
// Disables paging for `git` and hopefully other commands
|
||||
env.insert("PAGER".into(), "".into());
|
||||
for var in extra_env {
|
||||
env.insert(var.name, var.value);
|
||||
}
|
||||
@@ -1985,18 +2109,16 @@ impl AcpThread {
|
||||
let terminal_id = terminal_id.clone();
|
||||
async move |_this, cx| {
|
||||
let env = env.await;
|
||||
let (task_command, task_args) = ShellBuilder::new(
|
||||
project
|
||||
.update(cx, |project, cx| {
|
||||
project
|
||||
.remote_client()
|
||||
.and_then(|r| r.read(cx).default_system_shell())
|
||||
})?
|
||||
.as_deref(),
|
||||
&Shell::Program(get_default_system_shell()),
|
||||
)
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(command.clone()), &args);
|
||||
let shell = project
|
||||
.update(cx, |project, cx| {
|
||||
project
|
||||
.remote_client()
|
||||
.and_then(|r| r.read(cx).default_system_shell())
|
||||
})?
|
||||
.unwrap_or_else(|| get_default_system_shell());
|
||||
let (task_command, task_args) = ShellBuilder::new(&Shell::Program(shell))
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(command.clone()), &args);
|
||||
let terminal = project
|
||||
.update(cx, |project, cx| {
|
||||
project.create_terminal_task(
|
||||
@@ -2079,6 +2201,32 @@ impl AcpThread {
|
||||
pub fn emit_load_error(&mut self, error: LoadError, cx: &mut Context<Self>) {
|
||||
cx.emit(AcpThreadEvent::LoadError(error));
|
||||
}
|
||||
|
||||
pub fn register_terminal_created(
|
||||
&mut self,
|
||||
terminal_id: acp::TerminalId,
|
||||
command_label: String,
|
||||
working_dir: Option<PathBuf>,
|
||||
output_byte_limit: Option<u64>,
|
||||
terminal: Entity<::terminal::Terminal>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Entity<Terminal> {
|
||||
let language_registry = self.project.read(cx).languages().clone();
|
||||
|
||||
let entity = cx.new(|cx| {
|
||||
Terminal::new(
|
||||
terminal_id.clone(),
|
||||
&command_label,
|
||||
working_dir.clone(),
|
||||
output_byte_limit.map(|l| l as usize),
|
||||
terminal,
|
||||
language_registry,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
self.terminals.insert(terminal_id.clone(), entity.clone());
|
||||
entity
|
||||
}
|
||||
}
|
||||
|
||||
fn markdown_for_raw_output(
|
||||
@@ -2155,6 +2303,145 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_terminal_output_buffered_before_created_renders(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let project = Project::test(fs, [], cx).await;
|
||||
let connection = Rc::new(FakeAgentConnection::new());
|
||||
let thread = cx
|
||||
.update(|cx| connection.new_thread(project, std::path::Path::new(path!("/test")), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let terminal_id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
|
||||
|
||||
// Send Output BEFORE Created - should be buffered by acp_thread
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Output {
|
||||
terminal_id: terminal_id.clone(),
|
||||
data: b"hello buffered".to_vec(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// Create a display-only terminal and then send Created
|
||||
let lower = cx.new(|cx| {
|
||||
let builder = ::terminal::TerminalBuilder::new_display_only(
|
||||
::terminal::terminal_settings::CursorShape::default(),
|
||||
::terminal::terminal_settings::AlternateScroll::On,
|
||||
None,
|
||||
0,
|
||||
)
|
||||
.unwrap();
|
||||
builder.subscribe(cx)
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Created {
|
||||
terminal_id: terminal_id.clone(),
|
||||
label: "Buffered Test".to_string(),
|
||||
cwd: None,
|
||||
output_byte_limit: None,
|
||||
terminal: lower.clone(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// After Created, buffered Output should have been flushed into the renderer
|
||||
let content = thread.read_with(cx, |thread, cx| {
|
||||
let term = thread.terminal(terminal_id.clone()).unwrap();
|
||||
term.read_with(cx, |t, cx| t.inner().read(cx).get_content())
|
||||
});
|
||||
|
||||
assert!(
|
||||
content.contains("hello buffered"),
|
||||
"expected buffered output to render, got: {content}"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_terminal_output_and_exit_buffered_before_created(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let project = Project::test(fs, [], cx).await;
|
||||
let connection = Rc::new(FakeAgentConnection::new());
|
||||
let thread = cx
|
||||
.update(|cx| connection.new_thread(project, std::path::Path::new(path!("/test")), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let terminal_id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
|
||||
|
||||
// Send Output BEFORE Created
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Output {
|
||||
terminal_id: terminal_id.clone(),
|
||||
data: b"pre-exit data".to_vec(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// Send Exit BEFORE Created
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Exit {
|
||||
terminal_id: terminal_id.clone(),
|
||||
status: acp::TerminalExitStatus {
|
||||
exit_code: Some(0),
|
||||
signal: None,
|
||||
meta: None,
|
||||
},
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// Now create a display-only lower-level terminal and send Created
|
||||
let lower = cx.new(|cx| {
|
||||
let builder = ::terminal::TerminalBuilder::new_display_only(
|
||||
::terminal::terminal_settings::CursorShape::default(),
|
||||
::terminal::terminal_settings::AlternateScroll::On,
|
||||
None,
|
||||
0,
|
||||
)
|
||||
.unwrap();
|
||||
builder.subscribe(cx)
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Created {
|
||||
terminal_id: terminal_id.clone(),
|
||||
label: "Buffered Exit Test".to_string(),
|
||||
cwd: None,
|
||||
output_byte_limit: None,
|
||||
terminal: lower.clone(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// Output should be present after Created (flushed from buffer)
|
||||
let content = thread.read_with(cx, |thread, cx| {
|
||||
let term = thread.terminal(terminal_id.clone()).unwrap();
|
||||
term.read_with(cx, |t, cx| t.inner().read(cx).get_content())
|
||||
});
|
||||
|
||||
assert!(
|
||||
content.contains("pre-exit data"),
|
||||
"expected pre-exit data to render, got: {content}"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_push_user_content_block(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
@@ -31,7 +31,7 @@ impl Diff {
|
||||
let buffer = new_buffer.clone();
|
||||
async move |_, cx| {
|
||||
let language = language_registry
|
||||
.language_for_file_path(Path::new(&path))
|
||||
.load_language_for_file_path(Path::new(&path))
|
||||
.await
|
||||
.log_err();
|
||||
|
||||
|
||||
@@ -47,6 +47,8 @@ task.workspace = true
|
||||
tempfile.workspace = true
|
||||
thiserror.workspace = true
|
||||
ui.workspace = true
|
||||
terminal.workspace = true
|
||||
uuid.workspace = true
|
||||
util.workspace = true
|
||||
watch.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
@@ -9,6 +9,7 @@ use futures::io::BufReader;
|
||||
use project::Project;
|
||||
use project::agent_server_store::AgentServerCommand;
|
||||
use serde::Deserialize;
|
||||
use task::Shell;
|
||||
use util::ResultExt as _;
|
||||
|
||||
use std::path::PathBuf;
|
||||
@@ -19,7 +20,9 @@ use thiserror::Error;
|
||||
use anyhow::{Context as _, Result};
|
||||
use gpui::{App, AppContext as _, AsyncApp, Entity, SharedString, Task, WeakEntity};
|
||||
|
||||
use acp_thread::{AcpThread, AuthRequired, LoadError};
|
||||
use acp_thread::{AcpThread, AuthRequired, LoadError, TerminalProviderEvent};
|
||||
use terminal::TerminalBuilder;
|
||||
use terminal::terminal_settings::{AlternateScroll, CursorShape};
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
#[error("Unsupported version")]
|
||||
@@ -380,6 +383,10 @@ impl AgentConnection for AcpConnection {
|
||||
match result {
|
||||
Ok(response) => Ok(response),
|
||||
Err(err) => {
|
||||
if err.code == acp::ErrorCode::AUTH_REQUIRED.code {
|
||||
return Err(anyhow!(acp::Error::auth_required()));
|
||||
}
|
||||
|
||||
if err.code != ErrorCode::INTERNAL_ERROR.code {
|
||||
anyhow::bail!(err)
|
||||
}
|
||||
@@ -696,10 +703,100 @@ impl acp::Client for ClientDelegate {
|
||||
}
|
||||
}
|
||||
|
||||
// Clone so we can inspect meta both before and after handing off to the thread
|
||||
let update_clone = notification.update.clone();
|
||||
|
||||
// Pre-handle: if a ToolCall carries terminal_info, create/register a display-only terminal.
|
||||
if let acp::SessionUpdate::ToolCall(tc) = &update_clone {
|
||||
if let Some(meta) = &tc.meta {
|
||||
if let Some(terminal_info) = meta.get("terminal_info") {
|
||||
if let Some(id_str) = terminal_info.get("terminal_id").and_then(|v| v.as_str())
|
||||
{
|
||||
let terminal_id = acp::TerminalId(id_str.into());
|
||||
let cwd = terminal_info
|
||||
.get("cwd")
|
||||
.and_then(|v| v.as_str().map(PathBuf::from));
|
||||
|
||||
// Create a minimal display-only lower-level terminal and register it.
|
||||
let _ = session.thread.update(&mut self.cx.clone(), |thread, cx| {
|
||||
let builder = TerminalBuilder::new_display_only(
|
||||
CursorShape::default(),
|
||||
AlternateScroll::On,
|
||||
None,
|
||||
0,
|
||||
)?;
|
||||
let lower = cx.new(|cx| builder.subscribe(cx));
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Created {
|
||||
terminal_id: terminal_id.clone(),
|
||||
label: tc.title.clone(),
|
||||
cwd,
|
||||
output_byte_limit: None,
|
||||
terminal: lower,
|
||||
},
|
||||
cx,
|
||||
);
|
||||
anyhow::Ok(())
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Forward the update to the acp_thread as usual.
|
||||
session.thread.update(&mut self.cx.clone(), |thread, cx| {
|
||||
thread.handle_session_update(notification.update, cx)
|
||||
thread.handle_session_update(notification.update.clone(), cx)
|
||||
})??;
|
||||
|
||||
// Post-handle: stream terminal output/exit if present on ToolCallUpdate meta.
|
||||
if let acp::SessionUpdate::ToolCallUpdate(tcu) = &update_clone {
|
||||
if let Some(meta) = &tcu.meta {
|
||||
if let Some(term_out) = meta.get("terminal_output") {
|
||||
if let Some(id_str) = term_out.get("terminal_id").and_then(|v| v.as_str()) {
|
||||
let terminal_id = acp::TerminalId(id_str.into());
|
||||
if let Some(s) = term_out.get("data").and_then(|v| v.as_str()) {
|
||||
let data = s.as_bytes().to_vec();
|
||||
let _ = session.thread.update(&mut self.cx.clone(), |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Output {
|
||||
terminal_id: terminal_id.clone(),
|
||||
data,
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// terminal_exit
|
||||
if let Some(term_exit) = meta.get("terminal_exit") {
|
||||
if let Some(id_str) = term_exit.get("terminal_id").and_then(|v| v.as_str()) {
|
||||
let terminal_id = acp::TerminalId(id_str.into());
|
||||
let status = acp::TerminalExitStatus {
|
||||
exit_code: term_exit
|
||||
.get("exit_code")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|i| i as u32),
|
||||
signal: term_exit
|
||||
.get("signal")
|
||||
.and_then(|v| v.as_str().map(|s| s.to_string())),
|
||||
meta: None,
|
||||
};
|
||||
let _ = session.thread.update(&mut self.cx.clone(), |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Exit {
|
||||
terminal_id: terminal_id.clone(),
|
||||
status,
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -707,25 +804,68 @@ impl acp::Client for ClientDelegate {
|
||||
&self,
|
||||
args: acp::CreateTerminalRequest,
|
||||
) -> Result<acp::CreateTerminalResponse, acp::Error> {
|
||||
let terminal = self
|
||||
.session_thread(&args.session_id)?
|
||||
.update(&mut self.cx.clone(), |thread, cx| {
|
||||
thread.create_terminal(
|
||||
args.command,
|
||||
args.args,
|
||||
args.env,
|
||||
args.cwd,
|
||||
args.output_byte_limit,
|
||||
let thread = self.session_thread(&args.session_id)?;
|
||||
let project = thread.read_with(&self.cx, |thread, _cx| thread.project().clone())?;
|
||||
|
||||
let mut env = if let Some(dir) = &args.cwd {
|
||||
project
|
||||
.update(&mut self.cx.clone(), |project, cx| {
|
||||
project.directory_environment(&task::Shell::System, dir.clone().into(), cx)
|
||||
})?
|
||||
.await
|
||||
.unwrap_or_default()
|
||||
} else {
|
||||
Default::default()
|
||||
};
|
||||
for var in args.env {
|
||||
env.insert(var.name, var.value);
|
||||
}
|
||||
|
||||
// Use remote shell or default system shell, as appropriate
|
||||
let shell = project
|
||||
.update(&mut self.cx.clone(), |project, cx| {
|
||||
project
|
||||
.remote_client()
|
||||
.and_then(|r| r.read(cx).default_system_shell())
|
||||
.map(Shell::Program)
|
||||
})?
|
||||
.unwrap_or(task::Shell::System);
|
||||
let (task_command, task_args) = task::ShellBuilder::new(&shell)
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(args.command.clone()), &args.args);
|
||||
|
||||
let terminal_entity = project
|
||||
.update(&mut self.cx.clone(), |project, cx| {
|
||||
project.create_terminal_task(
|
||||
task::SpawnInTerminal {
|
||||
command: Some(task_command),
|
||||
args: task_args,
|
||||
cwd: args.cwd.clone(),
|
||||
env,
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await?;
|
||||
Ok(
|
||||
terminal.read_with(&self.cx, |terminal, _| acp::CreateTerminalResponse {
|
||||
terminal_id: terminal.id().clone(),
|
||||
meta: None,
|
||||
})?,
|
||||
)
|
||||
|
||||
// Register with renderer
|
||||
let terminal_entity = thread.update(&mut self.cx.clone(), |thread, cx| {
|
||||
thread.register_terminal_created(
|
||||
acp::TerminalId(uuid::Uuid::new_v4().to_string().into()),
|
||||
format!("{} {}", args.command, args.args.join(" ")),
|
||||
args.cwd.clone(),
|
||||
args.output_byte_limit,
|
||||
terminal_entity,
|
||||
cx,
|
||||
)
|
||||
})?;
|
||||
let terminal_id =
|
||||
terminal_entity.read_with(&self.cx, |terminal, _| terminal.id().clone())?;
|
||||
Ok(acp::CreateTerminalResponse {
|
||||
terminal_id,
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
|
||||
async fn kill_terminal_command(
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
mod acp;
|
||||
mod claude;
|
||||
mod codex;
|
||||
mod custom;
|
||||
mod gemini;
|
||||
|
||||
@@ -8,6 +9,7 @@ pub mod e2e_tests;
|
||||
|
||||
pub use claude::*;
|
||||
use client::ProxySettings;
|
||||
pub use codex::*;
|
||||
use collections::HashMap;
|
||||
pub use custom::*;
|
||||
use fs::Fs;
|
||||
|
||||
80
crates/agent_servers/src/codex.rs
Normal file
80
crates/agent_servers/src/codex.rs
Normal file
@@ -0,0 +1,80 @@
|
||||
use std::rc::Rc;
|
||||
use std::{any::Any, path::Path};
|
||||
|
||||
use crate::{AgentServer, AgentServerDelegate, load_proxy_env};
|
||||
use acp_thread::AgentConnection;
|
||||
use anyhow::{Context as _, Result};
|
||||
use gpui::{App, SharedString, Task};
|
||||
use project::agent_server_store::CODEX_NAME;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Codex;
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use super::*;
|
||||
|
||||
crate::common_e2e_tests!(async |_, _, _| Codex, allow_option_id = "proceed_once");
|
||||
}
|
||||
|
||||
impl AgentServer for Codex {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"codex"
|
||||
}
|
||||
|
||||
fn name(&self) -> SharedString {
|
||||
"Codex".into()
|
||||
}
|
||||
|
||||
fn logo(&self) -> ui::IconName {
|
||||
ui::IconName::AiOpenAi
|
||||
}
|
||||
|
||||
fn connect(
|
||||
&self,
|
||||
root_dir: Option<&Path>,
|
||||
delegate: AgentServerDelegate,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
let extra_env = load_proxy_env(cx);
|
||||
let default_mode = self.default_mode(cx);
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let (command, root_dir, login) = store
|
||||
.update(cx, |store, cx| {
|
||||
let agent = store
|
||||
.get_external_agent(&CODEX_NAME.into())
|
||||
.context("Codex is not registered")?;
|
||||
anyhow::Ok(agent.get_command(
|
||||
root_dir.as_deref(),
|
||||
extra_env,
|
||||
delegate.status_tx,
|
||||
// For now, report that there are no updates.
|
||||
// (A future PR will use the GitHub Releases API to fetch them.)
|
||||
delegate.new_version_available,
|
||||
&mut cx.to_async(),
|
||||
))
|
||||
})??
|
||||
.await?;
|
||||
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
is_remote,
|
||||
cx,
|
||||
)
|
||||
.await?;
|
||||
Ok((connection, login))
|
||||
})
|
||||
}
|
||||
|
||||
fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
|
||||
self
|
||||
}
|
||||
}
|
||||
@@ -483,6 +483,13 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
|
||||
default_mode: None,
|
||||
}),
|
||||
gemini: Some(crate::gemini::tests::local_command().into()),
|
||||
codex: Some(BuiltinAgentServerSettings {
|
||||
path: Some("codex-acp".into()),
|
||||
args: None,
|
||||
env: None,
|
||||
ignore_system_version: None,
|
||||
default_mode: None,
|
||||
}),
|
||||
custom: collections::HashMap::default(),
|
||||
},
|
||||
cx,
|
||||
|
||||
@@ -80,7 +80,6 @@ serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
serde_json_lenient.workspace = true
|
||||
settings.workspace = true
|
||||
shlex.workspace = true
|
||||
smol.workspace = true
|
||||
streaming_diff.workspace = true
|
||||
task.workspace = true
|
||||
|
||||
@@ -1047,6 +1047,12 @@ impl SlashCommandCompletion {
|
||||
let mut argument = None;
|
||||
let mut command = None;
|
||||
if let Some((command_text, args)) = last_command.split_once(char::is_whitespace) {
|
||||
// If the args start with '@', treat this as a mention completion, not a slash command argument.
|
||||
// Early-return None to let MentionCompletion::try_parse handle it.
|
||||
if args.trim_start().starts_with('@') {
|
||||
return None;
|
||||
}
|
||||
|
||||
if !args.is_empty() {
|
||||
argument = Some(args.trim_end().to_string());
|
||||
}
|
||||
@@ -1214,6 +1220,14 @@ mod tests {
|
||||
assert_eq!(SlashCommandCompletion::try_parse("Lorem/", 0), None);
|
||||
|
||||
assert_eq!(SlashCommandCompletion::try_parse("/ ", 0), None);
|
||||
|
||||
// Slash commands should not consume @ symbols that could be mention triggers
|
||||
assert_eq!(SlashCommandCompletion::try_parse("/init @", 0), None);
|
||||
|
||||
assert_eq!(SlashCommandCompletion::try_parse("/help @file", 0), None);
|
||||
|
||||
// @ with whitespace before it should also not be consumed
|
||||
assert_eq!(SlashCommandCompletion::try_parse("/command @", 0), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -203,7 +203,7 @@ impl EntryViewState {
|
||||
self.entries.drain(range);
|
||||
}
|
||||
|
||||
pub fn agent_font_size_changed(&mut self, cx: &mut App) {
|
||||
pub fn agent_ui_font_size_changed(&mut self, cx: &mut App) {
|
||||
for entry in self.entries.iter() {
|
||||
match entry {
|
||||
Entry::UserMessage { .. } | Entry::AssistantMessage { .. } => {}
|
||||
@@ -387,7 +387,7 @@ fn diff_editor_text_style_refinement(cx: &mut App) -> TextStyleRefinement {
|
||||
font_size: Some(
|
||||
TextSize::Small
|
||||
.rems(cx)
|
||||
.to_pixels(ThemeSettings::get_global(cx).agent_font_size(cx))
|
||||
.to_pixels(ThemeSettings::get_global(cx).agent_ui_font_size(cx))
|
||||
.into(),
|
||||
),
|
||||
..Default::default()
|
||||
|
||||
@@ -1299,7 +1299,7 @@ impl Render for MessageEditor {
|
||||
font_family: settings.buffer_font.family.clone(),
|
||||
font_fallbacks: settings.buffer_font.fallbacks.clone(),
|
||||
font_features: settings.buffer_font.features.clone(),
|
||||
font_size: settings.buffer_font_size(cx).into(),
|
||||
font_size: settings.agent_buffer_font_size(cx).into(),
|
||||
line_height: relative(settings.buffer_line_height.value()),
|
||||
..Default::default()
|
||||
};
|
||||
@@ -2061,6 +2061,156 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_mention_menu_after_slash_command_with_space(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
// Create test files in the fake filesystem
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"test_file.txt": "test content",
|
||||
"another.txt": "more content",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let workspace = window.root(cx).unwrap();
|
||||
|
||||
let worktree = project.update(cx, |project, cx| {
|
||||
let mut worktrees = project.worktrees(cx).collect::<Vec<_>>();
|
||||
assert_eq!(worktrees.len(), 1);
|
||||
worktrees.pop().unwrap()
|
||||
});
|
||||
let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
|
||||
|
||||
let mut cx = VisualTestContext::from_window(*window, cx);
|
||||
|
||||
// Open the files so they appear in recent file mentions
|
||||
let paths = vec![rel_path("test_file.txt"), rel_path("another.txt")];
|
||||
for path in paths {
|
||||
workspace
|
||||
.update_in(&mut cx, |workspace, window, cx| {
|
||||
workspace.open_path(
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: path.into(),
|
||||
},
|
||||
None,
|
||||
false,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
let context_store = cx.new(|cx| ContextStore::fake(project.clone(), cx));
|
||||
let history_store = cx.new(|cx| HistoryStore::new(context_store, cx));
|
||||
let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
|
||||
let available_commands = Rc::new(RefCell::new(vec![acp::AvailableCommand {
|
||||
name: "init".to_string(),
|
||||
description: "Initialize a project".to_string(),
|
||||
input: Some(acp::AvailableCommandInput::Unstructured {
|
||||
hint: "<description>".to_string(),
|
||||
}),
|
||||
meta: None,
|
||||
}]));
|
||||
|
||||
let editor = workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
let workspace_handle = cx.weak_entity();
|
||||
let message_editor = cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace_handle,
|
||||
project.clone(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
prompt_capabilities.clone(),
|
||||
available_commands.clone(),
|
||||
"Claude Code".into(),
|
||||
"Test",
|
||||
EditorMode::AutoHeight {
|
||||
max_lines: None,
|
||||
min_lines: 1,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
workspace.active_pane().update(cx, |pane, cx| {
|
||||
pane.add_item(
|
||||
Box::new(cx.new(|_| MessageEditorItem(message_editor.clone()))),
|
||||
true,
|
||||
true,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
message_editor.read(cx).focus_handle(cx).focus(window);
|
||||
message_editor.read(cx).editor().clone()
|
||||
});
|
||||
|
||||
// Type "/init " (slash command with space)
|
||||
cx.simulate_input("/init ");
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(editor.text(cx), "/init ");
|
||||
});
|
||||
|
||||
// Now type "@" - this should open the @ mention menu
|
||||
cx.simulate_input("@");
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(editor.text(cx), "/init @");
|
||||
|
||||
// The @ mention menu should be visible after typing @ following a slash command with space.
|
||||
assert!(
|
||||
editor.has_visible_completions_menu(),
|
||||
"Completion menu should be visible after typing @"
|
||||
);
|
||||
|
||||
// Check that we have @ mention completions (file mentions from recently opened files)
|
||||
// not slash command completions (which would be "init")
|
||||
let labels = current_completion_labels(editor);
|
||||
|
||||
// We should see our recently opened files in the completions
|
||||
let has_file_mention = labels
|
||||
.iter()
|
||||
.any(|label| label.contains("test_file.txt") || label.contains("another.txt"));
|
||||
|
||||
// We should NOT see the slash command "init"
|
||||
let has_slash_command = labels.iter().any(|label| label == "init");
|
||||
|
||||
assert!(
|
||||
has_file_mention,
|
||||
"Expected @ mention completions with file names (test_file.txt, another.txt) but got: {:?}",
|
||||
labels
|
||||
);
|
||||
|
||||
assert!(
|
||||
!has_slash_command,
|
||||
"Expected @ mention completions but got slash command completion 'init': {:?}",
|
||||
labels
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_context_completion_provider_mentions(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
@@ -9,7 +9,7 @@ use agent_client_protocol::{self as acp, PromptCapabilities};
|
||||
use agent_servers::{AgentServer, AgentServerDelegate};
|
||||
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode};
|
||||
use agent2::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore, NativeAgentServer};
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use anyhow::{Result, anyhow, bail};
|
||||
use arrayvec::ArrayVec;
|
||||
use audio::{Audio, Sound};
|
||||
use buffer_diff::BufferDiff;
|
||||
@@ -381,8 +381,8 @@ impl AcpThreadView {
|
||||
});
|
||||
|
||||
let subscriptions = [
|
||||
cx.observe_global_in::<SettingsStore>(window, Self::agent_font_size_changed),
|
||||
cx.observe_global_in::<AgentFontSize>(window, Self::agent_font_size_changed),
|
||||
cx.observe_global_in::<SettingsStore>(window, Self::agent_ui_font_size_changed),
|
||||
cx.observe_global_in::<AgentFontSize>(window, Self::agent_ui_font_size_changed),
|
||||
cx.subscribe_in(&message_editor, window, Self::handle_message_editor_event),
|
||||
cx.subscribe_in(&entry_view_state, window, Self::handle_entry_view_event),
|
||||
];
|
||||
@@ -1012,11 +1012,13 @@ impl AcpThreadView {
|
||||
};
|
||||
|
||||
let connection = thread.read(cx).connection().clone();
|
||||
if !connection
|
||||
.auth_methods()
|
||||
.iter()
|
||||
.any(|method| method.id.0.as_ref() == "claude-login")
|
||||
{
|
||||
let auth_methods = connection.auth_methods();
|
||||
let has_supported_auth = auth_methods.iter().any(|method| {
|
||||
let id = method.id.0.as_ref();
|
||||
id == "claude-login" || id == "spawn-gemini-cli"
|
||||
});
|
||||
let can_login = has_supported_auth || auth_methods.is_empty() || self.login.is_some();
|
||||
if !can_login {
|
||||
return;
|
||||
};
|
||||
let this = cx.weak_entity();
|
||||
@@ -1579,31 +1581,20 @@ impl AcpThreadView {
|
||||
return Task::ready(Ok(()));
|
||||
};
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let cwd = project.read(cx).first_project_directory(cx);
|
||||
let shell = project.read(cx).terminal_settings(&cwd, cx).shell.clone();
|
||||
|
||||
window.spawn(cx, async move |cx| {
|
||||
let mut task = login.clone();
|
||||
task.command = task
|
||||
.command
|
||||
.map(|command| anyhow::Ok(shlex::try_quote(&command)?.to_string()))
|
||||
.transpose()?;
|
||||
task.args = task
|
||||
.args
|
||||
.iter()
|
||||
.map(|arg| {
|
||||
Ok(shlex::try_quote(arg)
|
||||
.context("Failed to quote argument")?
|
||||
.to_string())
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
task.shell = task::Shell::WithArguments {
|
||||
program: task.command.take().expect("login command should be set"),
|
||||
args: std::mem::take(&mut task.args),
|
||||
title_override: None
|
||||
};
|
||||
task.full_label = task.label.clone();
|
||||
task.id = task::TaskId(format!("external-agent-{}-login", task.label));
|
||||
task.command_label = task.label.clone();
|
||||
task.use_new_terminal = true;
|
||||
task.allow_concurrent_runs = true;
|
||||
task.hide = task::HideStrategy::Always;
|
||||
task.shell = shell;
|
||||
|
||||
let terminal = terminal_panel.update_in(cx, |terminal_panel, window, cx| {
|
||||
terminal_panel.spawn_task(&task, window, cx)
|
||||
@@ -2725,7 +2716,7 @@ impl AcpThreadView {
|
||||
|
||||
let working_dir = working_dir
|
||||
.as_ref()
|
||||
.map(|path| format!("{}", path.display()))
|
||||
.map(|path| path.display().to_string())
|
||||
.unwrap_or_else(|| "current directory".to_string());
|
||||
|
||||
let is_expanded = self.expanded_tool_calls.contains(&tool_call.id);
|
||||
@@ -3712,13 +3703,10 @@ impl AcpThreadView {
|
||||
None
|
||||
} else {
|
||||
Some(
|
||||
Label::new(format!(
|
||||
"{separator}{}{separator}",
|
||||
parent.display(path_style)
|
||||
))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::XSmall)
|
||||
.buffer_font(cx),
|
||||
Label::new(format!("{}{separator}", parent.display(path_style)))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::XSmall)
|
||||
.buffer_font(cx),
|
||||
)
|
||||
}
|
||||
});
|
||||
@@ -4914,9 +4902,9 @@ impl AcpThreadView {
|
||||
)
|
||||
}
|
||||
|
||||
fn agent_font_size_changed(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
fn agent_ui_font_size_changed(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.entry_view_state.update(cx, |entry_view_state, cx| {
|
||||
entry_view_state.agent_font_size_changed(cx);
|
||||
entry_view_state.agent_ui_font_size_changed(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -5546,23 +5534,23 @@ fn default_markdown_style(
|
||||
}),
|
||||
code_block: StyleRefinement {
|
||||
padding: EdgesRefinement {
|
||||
top: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(Pixels(8.)))),
|
||||
left: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(Pixels(8.)))),
|
||||
right: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(Pixels(8.)))),
|
||||
bottom: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(Pixels(8.)))),
|
||||
top: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(8.)))),
|
||||
left: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(8.)))),
|
||||
right: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(8.)))),
|
||||
bottom: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(8.)))),
|
||||
},
|
||||
margin: EdgesRefinement {
|
||||
top: Some(Length::Definite(Pixels(8.).into())),
|
||||
left: Some(Length::Definite(Pixels(0.).into())),
|
||||
right: Some(Length::Definite(Pixels(0.).into())),
|
||||
bottom: Some(Length::Definite(Pixels(12.).into())),
|
||||
top: Some(Length::Definite(px(8.).into())),
|
||||
left: Some(Length::Definite(px(0.).into())),
|
||||
right: Some(Length::Definite(px(0.).into())),
|
||||
bottom: Some(Length::Definite(px(12.).into())),
|
||||
},
|
||||
border_style: Some(BorderStyle::Solid),
|
||||
border_widths: EdgesRefinement {
|
||||
top: Some(AbsoluteLength::Pixels(Pixels(1.))),
|
||||
left: Some(AbsoluteLength::Pixels(Pixels(1.))),
|
||||
right: Some(AbsoluteLength::Pixels(Pixels(1.))),
|
||||
bottom: Some(AbsoluteLength::Pixels(Pixels(1.))),
|
||||
top: Some(AbsoluteLength::Pixels(px(1.))),
|
||||
left: Some(AbsoluteLength::Pixels(px(1.))),
|
||||
right: Some(AbsoluteLength::Pixels(px(1.))),
|
||||
bottom: Some(AbsoluteLength::Pixels(px(1.))),
|
||||
},
|
||||
border_color: Some(colors.border_variant),
|
||||
background: Some(colors.editor_background.into()),
|
||||
|
||||
@@ -15,6 +15,7 @@ use context_server::ContextServerId;
|
||||
use editor::{Editor, SelectionEffects, scroll::Autoscroll};
|
||||
use extension::ExtensionManifest;
|
||||
use extension_host::ExtensionStore;
|
||||
use feature_flags::{CodexAcpFeatureFlag, FeatureFlagAppExt as _};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
Action, AnyView, App, AsyncWindowContext, Corner, Entity, EventEmitter, FocusHandle, Focusable,
|
||||
@@ -26,7 +27,7 @@ use language_model::{
|
||||
};
|
||||
use notifications::status_toast::{StatusToast, ToastIcon};
|
||||
use project::{
|
||||
agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, GEMINI_NAME},
|
||||
agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
|
||||
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
|
||||
};
|
||||
use settings::{Settings, SettingsStore, update_settings_file};
|
||||
@@ -1014,7 +1015,9 @@ impl AgentConfiguration {
|
||||
.agent_server_store
|
||||
.read(cx)
|
||||
.external_agents()
|
||||
.filter(|name| name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME)
|
||||
.filter(|name| {
|
||||
name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME && name.0 != CODEX_NAME
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
@@ -1077,15 +1080,23 @@ impl AgentConfiguration {
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiGemini,
|
||||
"Gemini CLI",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiClaude,
|
||||
"Claude Code",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.when(cx.has_flag::<CodexAcpFeatureFlag>(), |this| {
|
||||
this
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiOpenAi,
|
||||
"Codex",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
})
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiGemini,
|
||||
"Gemini CLI",
|
||||
))
|
||||
.map(|mut parent| {
|
||||
for agent in user_defined_agents {
|
||||
parent = parent.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
|
||||
@@ -317,6 +317,8 @@ impl ManageProfilesModal {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> impl IntoElement + use<> {
|
||||
let is_focused = profile.navigation.focus_handle.contains_focused(window, cx);
|
||||
|
||||
div()
|
||||
.id(SharedString::from(format!("profile-{}", profile.id)))
|
||||
.track_focus(&profile.navigation.focus_handle)
|
||||
@@ -328,25 +330,27 @@ impl ManageProfilesModal {
|
||||
})
|
||||
.child(
|
||||
ListItem::new(SharedString::from(format!("profile-{}", profile.id)))
|
||||
.toggle_state(profile.navigation.focus_handle.contains_focused(window, cx))
|
||||
.toggle_state(is_focused)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.child(Label::new(profile.name.clone()))
|
||||
.end_slot(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Label::new("Customize")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.children(KeyBinding::for_action_in(
|
||||
&menu::Confirm,
|
||||
&self.focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)),
|
||||
)
|
||||
.when(is_focused, |this| {
|
||||
this.end_slot(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Label::new("Customize")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.children(KeyBinding::for_action_in(
|
||||
&menu::Confirm,
|
||||
&self.focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)),
|
||||
)
|
||||
})
|
||||
.on_click({
|
||||
let profile_id = profile.id.clone();
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
|
||||
@@ -7,7 +7,7 @@ use acp_thread::AcpThread;
|
||||
use agent2::{DbThreadMetadata, HistoryEntry};
|
||||
use db::kvp::{Dismissable, KEY_VALUE_STORE};
|
||||
use project::agent_server_store::{
|
||||
AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, GEMINI_NAME,
|
||||
AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{
|
||||
@@ -75,6 +75,7 @@ use zed_actions::{
|
||||
assistant::{OpenRulesLibrary, ToggleFocus},
|
||||
};
|
||||
|
||||
use feature_flags::{CodexAcpFeatureFlag, FeatureFlagAppExt as _};
|
||||
const AGENT_PANEL_KEY: &str = "agent_panel";
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
@@ -216,6 +217,7 @@ pub enum AgentType {
|
||||
TextThread,
|
||||
Gemini,
|
||||
ClaudeCode,
|
||||
Codex,
|
||||
NativeAgent,
|
||||
Custom {
|
||||
name: SharedString,
|
||||
@@ -230,6 +232,7 @@ impl AgentType {
|
||||
Self::NativeAgent => "Agent 2".into(),
|
||||
Self::Gemini => "Gemini CLI".into(),
|
||||
Self::ClaudeCode => "Claude Code".into(),
|
||||
Self::Codex => "Codex".into(),
|
||||
Self::Custom { name, .. } => name.into(),
|
||||
}
|
||||
}
|
||||
@@ -239,6 +242,7 @@ impl AgentType {
|
||||
Self::Zed | Self::NativeAgent | Self::TextThread => None,
|
||||
Self::Gemini => Some(IconName::AiGemini),
|
||||
Self::ClaudeCode => Some(IconName::AiClaude),
|
||||
Self::Codex => Some(IconName::AiOpenAi),
|
||||
Self::Custom { .. } => Some(IconName::Terminal),
|
||||
}
|
||||
}
|
||||
@@ -249,6 +253,7 @@ impl From<ExternalAgent> for AgentType {
|
||||
match value {
|
||||
ExternalAgent::Gemini => Self::Gemini,
|
||||
ExternalAgent::ClaudeCode => Self::ClaudeCode,
|
||||
ExternalAgent::Codex => Self::Codex,
|
||||
ExternalAgent::Custom { name, command } => Self::Custom { name, command },
|
||||
ExternalAgent::NativeAgent => Self::NativeAgent,
|
||||
}
|
||||
@@ -1103,15 +1108,15 @@ impl AgentPanel {
|
||||
WhichFontSize::AgentFont => {
|
||||
if persist {
|
||||
update_settings_file(self.fs.clone(), cx, move |settings, cx| {
|
||||
let agent_font_size =
|
||||
ThemeSettings::get_global(cx).agent_font_size(cx) + delta;
|
||||
let agent_ui_font_size =
|
||||
ThemeSettings::get_global(cx).agent_ui_font_size(cx) + delta;
|
||||
let _ = settings
|
||||
.theme
|
||||
.agent_font_size
|
||||
.insert(theme::clamp_font_size(agent_font_size).into());
|
||||
.agent_ui_font_size
|
||||
.insert(theme::clamp_font_size(agent_ui_font_size).into());
|
||||
});
|
||||
} else {
|
||||
theme::adjust_agent_font_size(cx, |size| size + delta);
|
||||
theme::adjust_agent_ui_font_size(cx, |size| size + delta);
|
||||
}
|
||||
}
|
||||
WhichFontSize::BufferFont => {
|
||||
@@ -1131,10 +1136,10 @@ impl AgentPanel {
|
||||
) {
|
||||
if action.persist {
|
||||
update_settings_file(self.fs.clone(), cx, move |settings, _| {
|
||||
settings.theme.agent_font_size = None;
|
||||
settings.theme.agent_ui_font_size = None;
|
||||
});
|
||||
} else {
|
||||
theme::reset_agent_font_size(cx);
|
||||
theme::reset_agent_ui_font_size(cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1427,6 +1432,11 @@ impl AgentPanel {
|
||||
cx,
|
||||
)
|
||||
}
|
||||
AgentType::Codex => {
|
||||
self.selected_agent = AgentType::Codex;
|
||||
self.serialize(cx);
|
||||
self.external_thread(Some(crate::ExternalAgent::Codex), None, None, window, cx)
|
||||
}
|
||||
AgentType::Custom { name, command } => self.external_thread(
|
||||
Some(crate::ExternalAgent::Custom { name, command }),
|
||||
None,
|
||||
@@ -1939,32 +1949,6 @@ impl AgentPanel {
|
||||
)
|
||||
.separator()
|
||||
.header("External Agents")
|
||||
.item(
|
||||
ContextMenuEntry::new("New Gemini CLI Thread")
|
||||
.icon(IconName::AiGemini)
|
||||
.icon_color(Color::Muted)
|
||||
.disabled(is_via_collab)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(panel) =
|
||||
workspace.panel::<AgentPanel>(cx)
|
||||
{
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Gemini,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
.item(
|
||||
ContextMenuEntry::new("New Claude Code Thread")
|
||||
.icon(IconName::AiClaude)
|
||||
@@ -1991,12 +1975,66 @@ impl AgentPanel {
|
||||
}
|
||||
}),
|
||||
)
|
||||
.when(cx.has_flag::<CodexAcpFeatureFlag>(), |this| {
|
||||
this.item(
|
||||
ContextMenuEntry::new("New Codex Thread")
|
||||
.icon(IconName::AiOpenAi)
|
||||
.disabled(is_via_collab)
|
||||
.icon_color(Color::Muted)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(panel) =
|
||||
workspace.panel::<AgentPanel>(cx)
|
||||
{
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Codex,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
})
|
||||
.item(
|
||||
ContextMenuEntry::new("New Gemini CLI Thread")
|
||||
.icon(IconName::AiGemini)
|
||||
.icon_color(Color::Muted)
|
||||
.disabled(is_via_collab)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(panel) =
|
||||
workspace.panel::<AgentPanel>(cx)
|
||||
{
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Gemini,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
.map(|mut menu| {
|
||||
let agent_names = agent_server_store
|
||||
.read(cx)
|
||||
.external_agents()
|
||||
.filter(|name| {
|
||||
name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME
|
||||
name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME && name.0 != CODEX_NAME
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
@@ -2532,7 +2570,7 @@ impl Render for AgentPanel {
|
||||
|
||||
match self.active_view.which_font_size_used() {
|
||||
WhichFontSize::AgentFont => {
|
||||
WithRemSize::new(ThemeSettings::get_global(cx).agent_font_size(cx))
|
||||
WithRemSize::new(ThemeSettings::get_global(cx).agent_ui_font_size(cx))
|
||||
.size_full()
|
||||
.child(content)
|
||||
.into_any()
|
||||
|
||||
@@ -167,6 +167,7 @@ enum ExternalAgent {
|
||||
#[default]
|
||||
Gemini,
|
||||
ClaudeCode,
|
||||
Codex,
|
||||
NativeAgent,
|
||||
Custom {
|
||||
name: SharedString,
|
||||
@@ -188,6 +189,7 @@ impl ExternalAgent {
|
||||
Self::NativeAgent => "zed",
|
||||
Self::Gemini => "gemini-cli",
|
||||
Self::ClaudeCode => "claude-code",
|
||||
Self::Codex => "codex",
|
||||
Self::Custom { .. } => "custom",
|
||||
}
|
||||
}
|
||||
@@ -200,6 +202,7 @@ impl ExternalAgent {
|
||||
match self {
|
||||
Self::Gemini => Rc::new(agent_servers::Gemini),
|
||||
Self::ClaudeCode => Rc::new(agent_servers::ClaudeCode),
|
||||
Self::Codex => Rc::new(agent_servers::Codex),
|
||||
Self::NativeAgent => Rc::new(agent2::NativeAgentServer::new(fs, history)),
|
||||
Self::Custom { name, command: _ } => {
|
||||
Rc::new(agent_servers::CustomAgentServer::new(name.clone()))
|
||||
|
||||
@@ -18,7 +18,9 @@ use agent_settings::AgentSettings;
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::telemetry::Telemetry;
|
||||
use collections::{HashMap, HashSet, VecDeque, hash_map};
|
||||
use editor::RowExt;
|
||||
use editor::SelectionEffects;
|
||||
use editor::scroll::ScrollOffset;
|
||||
use editor::{
|
||||
Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, ExcerptRange,
|
||||
MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
|
||||
@@ -744,7 +746,7 @@ impl InlineAssistant {
|
||||
let scroll_bottom = scroll_top + editor.visible_line_count().unwrap_or(0.);
|
||||
editor_assists.scroll_lock = editor
|
||||
.row_for_block(decorations.prompt_block_id, cx)
|
||||
.map(|row| row.0 as f32)
|
||||
.map(|row| row.as_f64())
|
||||
.filter(|prompt_row| (scroll_top..scroll_bottom).contains(&prompt_row))
|
||||
.map(|prompt_row| InlineAssistScrollLock {
|
||||
assist_id,
|
||||
@@ -910,7 +912,9 @@ impl InlineAssistant {
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
let scroll_position = editor.scroll_position(cx);
|
||||
let target_scroll_top = editor.row_for_block(decorations.prompt_block_id, cx)?.0 as f32
|
||||
let target_scroll_top = editor
|
||||
.row_for_block(decorations.prompt_block_id, cx)?
|
||||
.as_f64()
|
||||
- scroll_lock.distance_from_top;
|
||||
if target_scroll_top != scroll_position.y {
|
||||
editor.set_scroll_position(point(scroll_position.x, target_scroll_top), window, cx);
|
||||
@@ -959,8 +963,9 @@ impl InlineAssistant {
|
||||
if let Some(decorations) = assist.decorations.as_ref() {
|
||||
let distance_from_top = editor.update(cx, |editor, cx| {
|
||||
let scroll_top = editor.scroll_position(cx).y;
|
||||
let prompt_row =
|
||||
editor.row_for_block(decorations.prompt_block_id, cx)?.0 as f32;
|
||||
let prompt_row = editor
|
||||
.row_for_block(decorations.prompt_block_id, cx)?
|
||||
.0 as ScrollOffset;
|
||||
Some(prompt_row - scroll_top)
|
||||
});
|
||||
|
||||
@@ -1192,8 +1197,8 @@ impl InlineAssistant {
|
||||
let mut scroll_target_range = None;
|
||||
if let Some(decorations) = assist.decorations.as_ref() {
|
||||
scroll_target_range = maybe!({
|
||||
let top = editor.row_for_block(decorations.prompt_block_id, cx)?.0 as f32;
|
||||
let bottom = editor.row_for_block(decorations.end_block_id, cx)?.0 as f32;
|
||||
let top = editor.row_for_block(decorations.prompt_block_id, cx)?.0 as f64;
|
||||
let bottom = editor.row_for_block(decorations.end_block_id, cx)?.0 as f64;
|
||||
Some((top, bottom))
|
||||
});
|
||||
if scroll_target_range.is_none() {
|
||||
@@ -1207,15 +1212,15 @@ impl InlineAssistant {
|
||||
.start
|
||||
.to_display_point(&snapshot.display_snapshot)
|
||||
.row();
|
||||
let top = start_row.0 as f32;
|
||||
let top = start_row.0 as ScrollOffset;
|
||||
let bottom = top + 1.0;
|
||||
(top, bottom)
|
||||
});
|
||||
let mut scroll_target_top = scroll_target_range.0;
|
||||
let mut scroll_target_bottom = scroll_target_range.1;
|
||||
|
||||
scroll_target_top -= editor.vertical_scroll_margin() as f32;
|
||||
scroll_target_bottom += editor.vertical_scroll_margin() as f32;
|
||||
scroll_target_top -= editor.vertical_scroll_margin() as ScrollOffset;
|
||||
scroll_target_bottom += editor.vertical_scroll_margin() as ScrollOffset;
|
||||
|
||||
let height_in_lines = editor.visible_line_count().unwrap_or(0.);
|
||||
let scroll_top = editor.scroll_position(cx).y;
|
||||
@@ -1543,7 +1548,7 @@ struct EditorInlineAssists {
|
||||
|
||||
struct InlineAssistScrollLock {
|
||||
assist_id: InlineAssistId,
|
||||
distance_from_top: f32,
|
||||
distance_from_top: ScrollOffset,
|
||||
}
|
||||
|
||||
impl EditorInlineAssists {
|
||||
|
||||
@@ -3,12 +3,20 @@ use agent_settings::{
|
||||
AgentProfile, AgentProfileId, AgentSettings, AvailableProfiles, builtin_profiles,
|
||||
};
|
||||
use fs::Fs;
|
||||
use gpui::{Action, Entity, FocusHandle, Subscription, prelude::*};
|
||||
use settings::{DockPosition, Settings as _, SettingsStore, update_settings_file};
|
||||
use std::sync::Arc;
|
||||
use fuzzy::{StringMatch, StringMatchCandidate, match_strings};
|
||||
use gpui::{
|
||||
Action, AnyElement, App, BackgroundExecutor, Context, DismissEvent, Entity, FocusHandle,
|
||||
Focusable, SharedString, Subscription, Task, Window,
|
||||
};
|
||||
use picker::{Picker, PickerDelegate, popover_menu::PickerPopoverMenu};
|
||||
use settings::{Settings as _, SettingsStore, update_settings_file};
|
||||
use std::{
|
||||
sync::atomic::Ordering,
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
use ui::{
|
||||
ContextMenu, ContextMenuEntry, DocumentationEdge, DocumentationSide, PopoverMenu,
|
||||
PopoverMenuHandle, TintColor, Tooltip, prelude::*,
|
||||
DocumentationAside, DocumentationEdge, DocumentationSide, HighlightedLabel, LabelSize,
|
||||
ListItem, ListItemSpacing, PopoverMenuHandle, TintColor, Tooltip, prelude::*,
|
||||
};
|
||||
|
||||
/// Trait for types that can provide and manage agent profiles
|
||||
@@ -25,9 +33,11 @@ pub trait ProfileProvider {
|
||||
|
||||
pub struct ProfileSelector {
|
||||
profiles: AvailableProfiles,
|
||||
pending_refresh: bool,
|
||||
fs: Arc<dyn Fs>,
|
||||
provider: Arc<dyn ProfileProvider>,
|
||||
menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
picker: Option<Entity<Picker<ProfilePickerDelegate>>>,
|
||||
picker_handle: PopoverMenuHandle<Picker<ProfilePickerDelegate>>,
|
||||
focus_handle: FocusHandle,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
@@ -40,125 +50,91 @@ impl ProfileSelector {
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let settings_subscription = cx.observe_global::<SettingsStore>(move |this, cx| {
|
||||
this.refresh_profiles(cx);
|
||||
this.pending_refresh = true;
|
||||
cx.notify();
|
||||
});
|
||||
|
||||
Self {
|
||||
profiles: AgentProfile::available_profiles(cx),
|
||||
pending_refresh: false,
|
||||
fs,
|
||||
provider,
|
||||
menu_handle: PopoverMenuHandle::default(),
|
||||
picker: None,
|
||||
picker_handle: PopoverMenuHandle::default(),
|
||||
focus_handle,
|
||||
_subscriptions: vec![settings_subscription],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn menu_handle(&self) -> PopoverMenuHandle<ContextMenu> {
|
||||
self.menu_handle.clone()
|
||||
pub fn menu_handle(&self) -> PopoverMenuHandle<Picker<ProfilePickerDelegate>> {
|
||||
self.picker_handle.clone()
|
||||
}
|
||||
|
||||
fn refresh_profiles(&mut self, cx: &mut Context<Self>) {
|
||||
self.profiles = AgentProfile::available_profiles(cx);
|
||||
}
|
||||
|
||||
fn build_context_menu(
|
||||
&self,
|
||||
fn ensure_picker(
|
||||
&mut self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Entity<ContextMenu> {
|
||||
ContextMenu::build(window, cx, |mut menu, _window, cx| {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
) -> Entity<Picker<ProfilePickerDelegate>> {
|
||||
if self.picker.is_none() {
|
||||
let delegate = ProfilePickerDelegate::new(
|
||||
self.fs.clone(),
|
||||
self.provider.clone(),
|
||||
self.profiles.clone(),
|
||||
cx.background_executor().clone(),
|
||||
cx,
|
||||
);
|
||||
|
||||
let mut found_non_builtin = false;
|
||||
for (profile_id, profile_name) in self.profiles.iter() {
|
||||
if !builtin_profiles::is_builtin(profile_id) {
|
||||
found_non_builtin = true;
|
||||
continue;
|
||||
}
|
||||
menu = menu.item(self.menu_entry_for_profile(
|
||||
profile_id.clone(),
|
||||
profile_name,
|
||||
settings,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
let picker = cx.new(|cx| {
|
||||
Picker::list(delegate, window, cx)
|
||||
.show_scrollbar(true)
|
||||
.width(rems(18.))
|
||||
.max_height(Some(rems(20.).into()))
|
||||
});
|
||||
|
||||
if found_non_builtin {
|
||||
menu = menu.separator().header("Custom Profiles");
|
||||
for (profile_id, profile_name) in self.profiles.iter() {
|
||||
if builtin_profiles::is_builtin(profile_id) {
|
||||
continue;
|
||||
}
|
||||
menu = menu.item(self.menu_entry_for_profile(
|
||||
profile_id.clone(),
|
||||
profile_name,
|
||||
settings,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
}
|
||||
self.picker = Some(picker);
|
||||
}
|
||||
|
||||
menu = menu.separator();
|
||||
menu = menu.item(ContextMenuEntry::new("Configure Profiles…").handler(
|
||||
move |window, cx| {
|
||||
window.dispatch_action(ManageProfiles::default().boxed_clone(), cx);
|
||||
},
|
||||
));
|
||||
|
||||
menu
|
||||
})
|
||||
}
|
||||
|
||||
fn menu_entry_for_profile(
|
||||
&self,
|
||||
profile_id: AgentProfileId,
|
||||
profile_name: &SharedString,
|
||||
settings: &AgentSettings,
|
||||
cx: &App,
|
||||
) -> ContextMenuEntry {
|
||||
let documentation = match profile_name.to_lowercase().as_str() {
|
||||
builtin_profiles::WRITE => Some("Get help to write anything."),
|
||||
builtin_profiles::ASK => Some("Chat about your codebase."),
|
||||
builtin_profiles::MINIMAL => Some("Chat about anything with no tools."),
|
||||
_ => None,
|
||||
};
|
||||
let thread_profile_id = self.provider.profile_id(cx);
|
||||
|
||||
let entry = ContextMenuEntry::new(profile_name.clone())
|
||||
.toggleable(IconPosition::End, profile_id == thread_profile_id);
|
||||
|
||||
let entry = if let Some(doc_text) = documentation {
|
||||
entry.documentation_aside(
|
||||
documentation_side(settings.dock),
|
||||
DocumentationEdge::Top,
|
||||
move |_| Label::new(doc_text).into_any_element(),
|
||||
)
|
||||
} else {
|
||||
entry
|
||||
};
|
||||
|
||||
entry.handler({
|
||||
let fs = self.fs.clone();
|
||||
let provider = self.provider.clone();
|
||||
move |_window, cx| {
|
||||
update_settings_file(fs.clone(), cx, {
|
||||
let profile_id = profile_id.clone();
|
||||
move |settings, _cx| {
|
||||
settings
|
||||
.agent
|
||||
.get_or_insert_default()
|
||||
.set_profile(profile_id.0);
|
||||
}
|
||||
if self.pending_refresh {
|
||||
if let Some(picker) = &self.picker {
|
||||
let profiles = AgentProfile::available_profiles(cx);
|
||||
self.profiles = profiles.clone();
|
||||
picker.update(cx, |picker, cx| {
|
||||
let query = picker.query(cx);
|
||||
picker
|
||||
.delegate
|
||||
.refresh_profiles(profiles.clone(), query, cx);
|
||||
});
|
||||
|
||||
provider.set_profile(profile_id.clone(), cx);
|
||||
}
|
||||
})
|
||||
self.pending_refresh = false;
|
||||
}
|
||||
|
||||
self.picker.as_ref().unwrap().clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl Focusable for ProfileSelector {
|
||||
fn focus_handle(&self, cx: &App) -> FocusHandle {
|
||||
if let Some(picker) = &self.picker {
|
||||
picker.focus_handle(cx)
|
||||
} else {
|
||||
self.focus_handle.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ProfileSelector {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
if !self.provider.profiles_supported(cx) {
|
||||
return Button::new("tools-not-supported-button", "Tools Unsupported")
|
||||
.disabled(true)
|
||||
.label_size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.tooltip(Tooltip::text("This model does not support tools."))
|
||||
.into_any_element();
|
||||
}
|
||||
|
||||
let picker = self.ensure_picker(window, cx);
|
||||
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
let profile_id = self.provider.profile_id(cx);
|
||||
let profile = settings.profiles.get(&profile_id);
|
||||
@@ -166,62 +142,594 @@ impl Render for ProfileSelector {
|
||||
let selected_profile = profile
|
||||
.map(|profile| profile.name.clone())
|
||||
.unwrap_or_else(|| "Unknown".into());
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
|
||||
if self.provider.profiles_supported(cx) {
|
||||
let this = cx.entity();
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
let trigger_button = Button::new("profile-selector-model", selected_profile)
|
||||
.label_size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.icon(IconName::ChevronDown)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_position(IconPosition::End)
|
||||
.icon_color(Color::Muted)
|
||||
.selected_style(ButtonStyle::Tinted(TintColor::Accent));
|
||||
let trigger_button = Button::new("profile-selector", selected_profile)
|
||||
.label_size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.icon(IconName::ChevronDown)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_position(IconPosition::End)
|
||||
.icon_color(Color::Muted)
|
||||
.selected_style(ButtonStyle::Tinted(TintColor::Accent));
|
||||
|
||||
PopoverMenu::new("profile-selector")
|
||||
.trigger_with_tooltip(trigger_button, {
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Toggle Profile Menu",
|
||||
&ToggleProfileSelector,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
.anchor(
|
||||
if documentation_side(settings.dock) == DocumentationSide::Left {
|
||||
gpui::Corner::BottomRight
|
||||
} else {
|
||||
gpui::Corner::BottomLeft
|
||||
},
|
||||
PickerPopoverMenu::new(
|
||||
picker,
|
||||
trigger_button,
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Toggle Profile Menu",
|
||||
&ToggleProfileSelector,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.with_handle(self.menu_handle.clone())
|
||||
.menu(move |window, cx| {
|
||||
Some(this.update(cx, |this, cx| this.build_context_menu(window, cx)))
|
||||
})
|
||||
.offset(gpui::Point {
|
||||
x: px(0.0),
|
||||
y: px(-2.0),
|
||||
})
|
||||
.into_any_element()
|
||||
},
|
||||
gpui::Corner::BottomRight,
|
||||
cx,
|
||||
)
|
||||
.with_handle(self.picker_handle.clone())
|
||||
.render(window, cx)
|
||||
.into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ProfileCandidate {
|
||||
id: AgentProfileId,
|
||||
name: SharedString,
|
||||
is_builtin: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ProfileMatchEntry {
|
||||
candidate_index: usize,
|
||||
positions: Vec<usize>,
|
||||
}
|
||||
|
||||
enum ProfilePickerEntry {
|
||||
Header(SharedString),
|
||||
Profile(ProfileMatchEntry),
|
||||
}
|
||||
|
||||
pub(crate) struct ProfilePickerDelegate {
|
||||
fs: Arc<dyn Fs>,
|
||||
provider: Arc<dyn ProfileProvider>,
|
||||
background: BackgroundExecutor,
|
||||
candidates: Vec<ProfileCandidate>,
|
||||
string_candidates: Arc<Vec<StringMatchCandidate>>,
|
||||
filtered_entries: Vec<ProfilePickerEntry>,
|
||||
selected_index: usize,
|
||||
query: String,
|
||||
cancel: Option<Arc<AtomicBool>>,
|
||||
}
|
||||
|
||||
impl ProfilePickerDelegate {
|
||||
fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
provider: Arc<dyn ProfileProvider>,
|
||||
profiles: AvailableProfiles,
|
||||
background: BackgroundExecutor,
|
||||
cx: &mut Context<ProfileSelector>,
|
||||
) -> Self {
|
||||
let candidates = Self::candidates_from(profiles);
|
||||
let string_candidates = Arc::new(Self::string_candidates(&candidates));
|
||||
let filtered_entries = Self::entries_from_candidates(&candidates);
|
||||
|
||||
let mut this = Self {
|
||||
fs,
|
||||
provider,
|
||||
background,
|
||||
candidates,
|
||||
string_candidates,
|
||||
filtered_entries,
|
||||
selected_index: 0,
|
||||
query: String::new(),
|
||||
cancel: None,
|
||||
};
|
||||
|
||||
this.selected_index = this
|
||||
.index_of_profile(&this.provider.profile_id(cx))
|
||||
.unwrap_or_else(|| this.first_selectable_index().unwrap_or(0));
|
||||
|
||||
this
|
||||
}
|
||||
|
||||
fn refresh_profiles(
|
||||
&mut self,
|
||||
profiles: AvailableProfiles,
|
||||
query: String,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) {
|
||||
self.candidates = Self::candidates_from(profiles);
|
||||
self.string_candidates = Arc::new(Self::string_candidates(&self.candidates));
|
||||
self.query = query;
|
||||
|
||||
if self.query.is_empty() {
|
||||
self.filtered_entries = Self::entries_from_candidates(&self.candidates);
|
||||
} else {
|
||||
Button::new("tools-not-supported-button", "Tools Unsupported")
|
||||
.disabled(true)
|
||||
.label_size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.tooltip(Tooltip::text("This model does not support tools."))
|
||||
.into_any_element()
|
||||
let matches = self.search_blocking(&self.query);
|
||||
self.filtered_entries = self.entries_from_matches(matches);
|
||||
}
|
||||
|
||||
self.selected_index = self
|
||||
.index_of_profile(&self.provider.profile_id(cx))
|
||||
.unwrap_or_else(|| self.first_selectable_index().unwrap_or(0));
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn candidates_from(profiles: AvailableProfiles) -> Vec<ProfileCandidate> {
|
||||
profiles
|
||||
.into_iter()
|
||||
.map(|(id, name)| ProfileCandidate {
|
||||
is_builtin: builtin_profiles::is_builtin(&id),
|
||||
id,
|
||||
name,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn string_candidates(candidates: &[ProfileCandidate]) -> Vec<StringMatchCandidate> {
|
||||
candidates
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, candidate)| StringMatchCandidate::new(index, candidate.name.as_ref()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn documentation(candidate: &ProfileCandidate) -> Option<&'static str> {
|
||||
match candidate.id.as_str() {
|
||||
builtin_profiles::WRITE => Some("Get help to write anything."),
|
||||
builtin_profiles::ASK => Some("Chat about your codebase."),
|
||||
builtin_profiles::MINIMAL => Some("Chat about anything with no tools."),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn entries_from_candidates(candidates: &[ProfileCandidate]) -> Vec<ProfilePickerEntry> {
|
||||
let mut entries = Vec::new();
|
||||
let mut inserted_custom_header = false;
|
||||
|
||||
for (idx, candidate) in candidates.iter().enumerate() {
|
||||
if !candidate.is_builtin && !inserted_custom_header {
|
||||
if !entries.is_empty() {
|
||||
entries.push(ProfilePickerEntry::Header("Custom Profiles".into()));
|
||||
}
|
||||
inserted_custom_header = true;
|
||||
}
|
||||
|
||||
entries.push(ProfilePickerEntry::Profile(ProfileMatchEntry {
|
||||
candidate_index: idx,
|
||||
positions: Vec::new(),
|
||||
}));
|
||||
}
|
||||
|
||||
entries
|
||||
}
|
||||
|
||||
fn entries_from_matches(&self, matches: Vec<StringMatch>) -> Vec<ProfilePickerEntry> {
|
||||
let mut entries = Vec::new();
|
||||
for mat in matches {
|
||||
if self.candidates.get(mat.candidate_id).is_some() {
|
||||
entries.push(ProfilePickerEntry::Profile(ProfileMatchEntry {
|
||||
candidate_index: mat.candidate_id,
|
||||
positions: mat.positions,
|
||||
}));
|
||||
}
|
||||
}
|
||||
entries
|
||||
}
|
||||
|
||||
fn first_selectable_index(&self) -> Option<usize> {
|
||||
self.filtered_entries
|
||||
.iter()
|
||||
.position(|entry| matches!(entry, ProfilePickerEntry::Profile(_)))
|
||||
}
|
||||
|
||||
fn index_of_profile(&self, profile_id: &AgentProfileId) -> Option<usize> {
|
||||
self.filtered_entries.iter().position(|entry| {
|
||||
matches!(entry, ProfilePickerEntry::Profile(profile) if self
|
||||
.candidates
|
||||
.get(profile.candidate_index)
|
||||
.map(|candidate| &candidate.id == profile_id)
|
||||
.unwrap_or(false))
|
||||
})
|
||||
}
|
||||
|
||||
fn search_blocking(&self, query: &str) -> Vec<StringMatch> {
|
||||
if query.is_empty() {
|
||||
return self
|
||||
.string_candidates
|
||||
.iter()
|
||||
.map(|candidate| StringMatch {
|
||||
candidate_id: candidate.id,
|
||||
score: 0.0,
|
||||
positions: Vec::new(),
|
||||
string: candidate.string.clone(),
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
|
||||
let cancel_flag = AtomicBool::new(false);
|
||||
|
||||
self.background.block(match_strings(
|
||||
self.string_candidates.as_ref(),
|
||||
query,
|
||||
false,
|
||||
true,
|
||||
100,
|
||||
&cancel_flag,
|
||||
self.background.clone(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl PickerDelegate for ProfilePickerDelegate {
    type ListItem = AnyElement;

    fn placeholder_text(&self, _: &mut Window, _: &mut App) -> Arc<str> {
        "Search profiles…".into()
    }

    // Distinguishes "no profiles configured at all" from "the query filtered
    // everything out".
    fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
        let text = if self.candidates.is_empty() {
            "No profiles.".into()
        } else {
            "No profiles match your search.".into()
        };
        Some(text)
    }

    fn match_count(&self) -> usize {
        self.filtered_entries.len()
    }

    fn selected_index(&self) -> usize {
        self.selected_index
    }

    fn set_selected_index(&mut self, ix: usize, _: &mut Window, cx: &mut Context<Picker<Self>>) {
        // Clamp into range; saturating_sub keeps an empty list at index 0.
        self.selected_index = ix.min(self.filtered_entries.len().saturating_sub(1));
        cx.notify();
    }

    // Only profile rows are selectable; section headers are skipped over.
    fn can_select(
        &mut self,
        ix: usize,
        _window: &mut Window,
        _cx: &mut Context<Picker<Self>>,
    ) -> bool {
        match self.filtered_entries.get(ix) {
            Some(ProfilePickerEntry::Profile(_)) => true,
            Some(ProfilePickerEntry::Header(_)) | None => false,
        }
    }

    // Re-filters the entry list for `query`. Empty queries are handled
    // synchronously; otherwise a background fuzzy match is spawned and any
    // previous in-flight match is cancelled via its shared AtomicBool.
    fn update_matches(
        &mut self,
        query: String,
        window: &mut Window,
        cx: &mut Context<Picker<Self>>,
    ) -> Task<()> {
        if query.is_empty() {
            self.query.clear();
            self.filtered_entries = Self::entries_from_candidates(&self.candidates);
            self.selected_index = self
                .index_of_profile(&self.provider.profile_id(cx))
                .unwrap_or_else(|| self.first_selectable_index().unwrap_or(0));
            cx.notify();
            return Task::ready(());
        }

        // Cancel the previous search before starting a new one.
        if let Some(prev) = &self.cancel {
            prev.store(true, Ordering::Relaxed);
        }
        let cancel = Arc::new(AtomicBool::new(false));
        self.cancel = Some(cancel.clone());

        let string_candidates = self.string_candidates.clone();
        let background = self.background.clone();
        let provider = self.provider.clone();
        self.query = query.clone();

        let cancel_for_future = cancel;

        cx.spawn_in(window, async move |this, cx| {
            let matches = match_strings(
                string_candidates.as_ref(),
                &query,
                false,
                true,
                100,
                cancel_for_future.as_ref(),
                background,
            )
            .await;

            this.update_in(cx, |this, _, cx| {
                // A newer query superseded this search; drop its results.
                if this.delegate.query != query {
                    return;
                }

                this.delegate.filtered_entries = this.delegate.entries_from_matches(matches);
                this.delegate.selected_index = this
                    .delegate
                    .index_of_profile(&provider.profile_id(cx))
                    .unwrap_or_else(|| this.delegate.first_selectable_index().unwrap_or(0));
                cx.notify();
            })
            .ok();
        })
    }

    // Applies the selected profile: persists it to the settings file, tells
    // the provider, records telemetry, and dismisses the picker. Confirming a
    // header (or an empty list) is a no-op.
    fn confirm(&mut self, _: bool, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
        match self.filtered_entries.get(self.selected_index) {
            Some(ProfilePickerEntry::Profile(entry)) => {
                if let Some(candidate) = self.candidates.get(entry.candidate_index) {
                    let profile_id = candidate.id.clone();
                    let fs = self.fs.clone();
                    let provider = self.provider.clone();

                    update_settings_file(fs, cx, {
                        let profile_id = profile_id.clone();
                        move |settings, _cx| {
                            settings
                                .agent
                                .get_or_insert_default()
                                .set_profile(profile_id.0);
                        }
                    });

                    // Apply immediately, without waiting for settings to round-trip.
                    provider.set_profile(profile_id.clone(), cx);

                    telemetry::event!(
                        "agent_profile_switched",
                        profile_id = profile_id.as_str(),
                        source = "picker"
                    );
                }

                cx.emit(DismissEvent);
            }
            _ => {}
        }
    }

    fn dismissed(&mut self, window: &mut Window, cx: &mut Context<Picker<Self>>) {
        // Deferred so the query reset does not re-enter the picker while it is
        // still handling the dismissal.
        cx.defer_in(window, |picker, window, cx| {
            picker.set_query("", window, cx);
        });
        cx.emit(DismissEvent);
    }

    // Renders either a muted section header or a profile row with fuzzy-match
    // highlights and a check mark on the active profile.
    fn render_match(
        &self,
        ix: usize,
        selected: bool,
        _: &mut Window,
        cx: &mut Context<Picker<Self>>,
    ) -> Option<Self::ListItem> {
        match self.filtered_entries.get(ix)? {
            ProfilePickerEntry::Header(label) => Some(
                div()
                    .px_2p5()
                    .pb_0p5()
                    // Separate the section from preceding rows unless it is first.
                    .when(ix > 0, |this| {
                        this.mt_1p5()
                            .pt_2()
                            .border_t_1()
                            .border_color(cx.theme().colors().border_variant)
                    })
                    .child(
                        Label::new(label.clone())
                            .size(LabelSize::XSmall)
                            .color(Color::Muted),
                    )
                    .into_any_element(),
            ),
            ProfilePickerEntry::Profile(entry) => {
                let candidate = self.candidates.get(entry.candidate_index)?;
                let active_id = self.provider.profile_id(cx);
                let is_active = active_id == candidate.id;

                Some(
                    ListItem::new(SharedString::from(candidate.id.0.clone()))
                        .inset(true)
                        .spacing(ListItemSpacing::Sparse)
                        .toggle_state(selected)
                        .child(HighlightedLabel::new(
                            candidate.name.clone(),
                            entry.positions.clone(),
                        ))
                        .when(is_active, |this| {
                            this.end_slot(
                                div()
                                    .pr_2()
                                    .child(Icon::new(IconName::Check).color(Color::Accent)),
                            )
                        })
                        .into_any_element(),
                )
            }
        }
    }

    // Shows a short description next to built-in profiles, positioned on
    // whichever side does not overlap the agent panel's dock.
    fn documentation_aside(
        &self,
        _window: &mut Window,
        cx: &mut Context<Picker<Self>>,
    ) -> Option<DocumentationAside> {
        use std::rc::Rc;

        let entry = match self.filtered_entries.get(self.selected_index)? {
            ProfilePickerEntry::Profile(entry) => entry,
            ProfilePickerEntry::Header(_) => return None,
        };

        let candidate = self.candidates.get(entry.candidate_index)?;
        let docs_aside = Self::documentation(candidate)?.to_string();

        let settings = AgentSettings::get_global(cx);
        let side = match settings.dock {
            settings::DockPosition::Left => DocumentationSide::Right,
            settings::DockPosition::Bottom | settings::DockPosition::Right => {
                DocumentationSide::Left
            }
        };

        Some(DocumentationAside {
            side,
            edge: DocumentationEdge::Top,
            render: Rc::new(move |_| Label::new(docs_aside.clone()).into_any_element()),
        })
    }

    // Persistent footer with a "Configure" button that opens profile management.
    fn render_footer(
        &self,
        _: &mut Window,
        cx: &mut Context<Picker<Self>>,
    ) -> Option<gpui::AnyElement> {
        Some(
            h_flex()
                .w_full()
                .border_t_1()
                .border_color(cx.theme().colors().border_variant)
                .p_1()
                .gap_4()
                .justify_between()
                .child(
                    Button::new("configure", "Configure")
                        .icon(IconName::Settings)
                        .icon_size(IconSize::Small)
                        .icon_color(Color::Muted)
                        .icon_position(IconPosition::Start)
                        .on_click(|_, window, cx| {
                            window.dispatch_action(ManageProfiles::default().boxed_clone(), cx);
                        }),
                )
                .into_any(),
        )
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use fs::FakeFs;
    use gpui::TestAppContext;

    #[gpui::test]
    fn entries_include_custom_profiles(_cx: &mut TestAppContext) {
        // One builtin followed by one custom profile.
        let candidates = vec![
            ProfileCandidate {
                id: AgentProfileId("write".into()),
                name: SharedString::from("Write"),
                is_builtin: true,
            },
            ProfileCandidate {
                id: AgentProfileId("my-custom".into()),
                name: SharedString::from("My Custom"),
                is_builtin: false,
            },
        ];

        let entries = ProfilePickerDelegate::entries_from_candidates(&candidates);

        // The custom profile must appear among the entries.
        let has_custom_profile = entries.iter().any(|entry| match entry {
            ProfilePickerEntry::Profile(profile) => {
                candidates[profile.candidate_index].id.as_str() == "my-custom"
            }
            _ => false,
        });
        assert!(has_custom_profile);

        // It must be introduced by the custom-profiles section header.
        let has_custom_header = entries.iter().any(|entry| match entry {
            ProfilePickerEntry::Header(label) => label.as_ref() == "Custom Profiles",
            _ => false,
        });
        assert!(has_custom_header);
    }

    #[gpui::test]
    fn fuzzy_filter_returns_no_results_and_keeps_configure(cx: &mut TestAppContext) {
        let builtin_only = vec![ProfileCandidate {
            id: AgentProfileId("write".into()),
            name: SharedString::from("Write"),
            is_builtin: true,
        }];

        let picker = ProfilePickerDelegate {
            fs: FakeFs::new(cx.executor()),
            provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
            background: cx.executor(),
            candidates: builtin_only,
            string_candidates: Arc::new(Vec::new()),
            filtered_entries: Vec::new(),
            selected_index: 0,
            query: String::new(),
            cancel: None,
        };

        // An empty match set must not panic while being converted to entries.
        let _entries = picker.entries_from_matches(Vec::new());
    }

    #[gpui::test]
    fn active_profile_selection_logic_works(cx: &mut TestAppContext) {
        let candidates = vec![
            ProfileCandidate {
                id: AgentProfileId("write".into()),
                name: SharedString::from("Write"),
                is_builtin: true,
            },
            ProfileCandidate {
                id: AgentProfileId("ask".into()),
                name: SharedString::from("Ask"),
                is_builtin: true,
            },
        ];

        let picker = ProfilePickerDelegate {
            fs: FakeFs::new(cx.executor()),
            provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
            background: cx.executor(),
            candidates,
            string_candidates: Arc::new(Vec::new()),
            // One unfiltered entry per candidate, in order.
            filtered_entries: (0..2)
                .map(|candidate_index| {
                    ProfilePickerEntry::Profile(ProfileMatchEntry {
                        candidate_index,
                        positions: Vec::new(),
                    })
                })
                .collect(),
            selected_index: 0,
            query: String::new(),
            cancel: None,
        };

        // The active "write" profile is the first entry.
        assert_eq!(
            picker.index_of_profile(&AgentProfileId("write".into())),
            Some(0)
        );
    }

    /// Minimal `ProfileProvider` stub that always reports a fixed profile id.
    struct TestProfileProvider {
        profile_id: AgentProfileId,
    }

    impl TestProfileProvider {
        fn new(profile_id: AgentProfileId) -> Self {
            Self { profile_id }
        }
    }

    impl ProfileProvider for TestProfileProvider {
        fn profile_id(&self, _cx: &App) -> AgentProfileId {
            self.profile_id.clone()
        }

        fn set_profile(&self, _profile_id: AgentProfileId, _cx: &mut App) {}

        fn profiles_supported(&self, _cx: &App) -> bool {
            true
        }
    }
}
|
||||
|
||||
fn documentation_side(position: DockPosition) -> DocumentationSide {
|
||||
match position {
|
||||
DockPosition::Left => DocumentationSide::Right,
|
||||
DockPosition::Bottom => DocumentationSide::Left,
|
||||
DockPosition::Right => DocumentationSide::Left,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@ use editor::{
|
||||
BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata, CustomBlockId, FoldId,
|
||||
RenderBlock, ToDisplayPoint,
|
||||
},
|
||||
scroll::ScrollOffset,
|
||||
};
|
||||
use editor::{FoldPlaceholder, display_map::CreaseId};
|
||||
use fs::Fs;
|
||||
@@ -108,7 +109,7 @@ pub enum InsertDraggedFiles {
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||
struct ScrollPosition {
|
||||
offset_before_cursor: gpui::Point<f32>,
|
||||
offset_before_cursor: gpui::Point<ScrollOffset>,
|
||||
cursor: Anchor,
|
||||
}
|
||||
|
||||
@@ -631,7 +632,7 @@ impl TextThreadEditor {
|
||||
let snapshot = editor.snapshot(window, cx);
|
||||
let cursor_point = scroll_position.cursor.to_display_point(&snapshot);
|
||||
let scroll_top =
|
||||
cursor_point.row().as_f32() - scroll_position.offset_before_cursor.y;
|
||||
cursor_point.row().as_f64() - scroll_position.offset_before_cursor.y;
|
||||
editor.set_scroll_position(
|
||||
point(scroll_position.offset_before_cursor.x, scroll_top),
|
||||
window,
|
||||
@@ -979,7 +980,7 @@ impl TextThreadEditor {
|
||||
let cursor_row = cursor
|
||||
.to_display_point(&snapshot.display_snapshot)
|
||||
.row()
|
||||
.as_f32();
|
||||
.as_f64();
|
||||
let scroll_position = editor
|
||||
.scroll_manager
|
||||
.anchor()
|
||||
|
||||
@@ -48,7 +48,7 @@ impl Render for BurnModeTooltip {
|
||||
let keybinding = KeyBinding::for_action(&ToggleBurnMode, window, cx)
|
||||
.map(|kb| kb.size(rems_from_px(12.)));
|
||||
|
||||
tooltip_container(window, cx, |this, _, _| {
|
||||
tooltip_container(cx, |this, _| {
|
||||
this
|
||||
.child(
|
||||
h_flex()
|
||||
|
||||
@@ -704,7 +704,7 @@ impl ContextPillHover {
|
||||
|
||||
impl Render for ContextPillHover {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
tooltip_container(window, cx, move |this, window, cx| {
|
||||
tooltip_container(cx, move |this, cx| {
|
||||
this.occlude()
|
||||
.on_mouse_move(|_, _, cx| cx.stop_propagation())
|
||||
.on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation())
|
||||
|
||||
@@ -40,7 +40,7 @@ impl AgentOnboardingModal {
|
||||
}
|
||||
|
||||
fn view_blog(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
|
||||
cx.open_url("http://zed.dev/blog/fastest-ai-code-editor");
|
||||
cx.open_url("https://zed.dev/blog/fastest-ai-code-editor");
|
||||
cx.notify();
|
||||
|
||||
agent_onboarding_event!("Blog Link Clicked");
|
||||
|
||||
@@ -12,8 +12,8 @@ impl UnavailableEditingTooltip {
|
||||
}
|
||||
|
||||
impl Render for UnavailableEditingTooltip {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
tooltip_container(window, cx, |this, _, _| {
|
||||
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
tooltip_container(cx, |this, _| {
|
||||
this.child(Label::new("Unavailable Editing")).child(
|
||||
div().max_w_64().child(
|
||||
Label::new(format!(
|
||||
|
||||
@@ -67,7 +67,6 @@ pub enum Model {
|
||||
alias = "claude-opus-4-1-thinking-latest"
|
||||
)]
|
||||
ClaudeOpus4_1Thinking,
|
||||
#[default]
|
||||
#[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")]
|
||||
ClaudeSonnet4,
|
||||
#[serde(
|
||||
@@ -75,6 +74,14 @@ pub enum Model {
|
||||
alias = "claude-sonnet-4-thinking-latest"
|
||||
)]
|
||||
ClaudeSonnet4Thinking,
|
||||
#[default]
|
||||
#[serde(rename = "claude-sonnet-4-5", alias = "claude-sonnet-4-5-latest")]
|
||||
ClaudeSonnet4_5,
|
||||
#[serde(
|
||||
rename = "claude-sonnet-4-5-thinking",
|
||||
alias = "claude-sonnet-4-5-thinking-latest"
|
||||
)]
|
||||
ClaudeSonnet4_5Thinking,
|
||||
#[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
|
||||
Claude3_7Sonnet,
|
||||
#[serde(
|
||||
@@ -133,6 +140,14 @@ impl Model {
|
||||
return Ok(Self::ClaudeOpus4);
|
||||
}
|
||||
|
||||
if id.starts_with("claude-sonnet-4-5-thinking") {
|
||||
return Ok(Self::ClaudeSonnet4_5Thinking);
|
||||
}
|
||||
|
||||
if id.starts_with("claude-sonnet-4-5") {
|
||||
return Ok(Self::ClaudeSonnet4_5);
|
||||
}
|
||||
|
||||
if id.starts_with("claude-sonnet-4-thinking") {
|
||||
return Ok(Self::ClaudeSonnet4Thinking);
|
||||
}
|
||||
@@ -180,6 +195,8 @@ impl Model {
|
||||
Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-thinking-latest",
|
||||
Self::ClaudeSonnet4 => "claude-sonnet-4-latest",
|
||||
Self::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking-latest",
|
||||
Self::ClaudeSonnet4_5 => "claude-sonnet-4-5-latest",
|
||||
Self::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-thinking-latest",
|
||||
Self::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Self::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
|
||||
Self::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
|
||||
@@ -197,6 +214,7 @@ impl Model {
|
||||
Self::ClaudeOpus4 | Self::ClaudeOpus4Thinking => "claude-opus-4-20250514",
|
||||
Self::ClaudeOpus4_1 | Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-20250805",
|
||||
Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => "claude-sonnet-4-20250514",
|
||||
Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-20250929",
|
||||
Self::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
|
||||
Self::Claude3_5Haiku => "claude-3-5-haiku-latest",
|
||||
@@ -215,6 +233,8 @@ impl Model {
|
||||
Self::ClaudeOpus4_1Thinking => "Claude Opus 4.1 Thinking",
|
||||
Self::ClaudeSonnet4 => "Claude Sonnet 4",
|
||||
Self::ClaudeSonnet4Thinking => "Claude Sonnet 4 Thinking",
|
||||
Self::ClaudeSonnet4_5 => "Claude Sonnet 4.5",
|
||||
Self::ClaudeSonnet4_5Thinking => "Claude Sonnet 4.5 Thinking",
|
||||
Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
|
||||
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
|
||||
Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
|
||||
@@ -236,6 +256,8 @@ impl Model {
|
||||
| Self::ClaudeOpus4_1Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::Claude3_7Sonnet
|
||||
@@ -261,6 +283,8 @@ impl Model {
|
||||
| Self::ClaudeOpus4_1Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::Claude3_7Sonnet
|
||||
@@ -280,6 +304,8 @@ impl Model {
|
||||
| Self::ClaudeOpus4_1Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
@@ -299,6 +325,8 @@ impl Model {
|
||||
| Self::ClaudeOpus4_1Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
@@ -318,6 +346,7 @@ impl Model {
|
||||
Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4_1
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
@@ -327,6 +356,7 @@ impl Model {
|
||||
Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeOpus4_1Thinking
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking {
|
||||
budget_tokens: Some(4_096),
|
||||
},
|
||||
|
||||
@@ -85,11 +85,8 @@ impl AskPassSession {
|
||||
let askpass_script_path = temp_dir.path().join(ASKPASS_SCRIPT_NAME);
|
||||
let (askpass_opened_tx, askpass_opened_rx) = oneshot::channel::<()>();
|
||||
let listener = UnixListener::bind(&askpass_socket).context("creating askpass socket")?;
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
let zed_path = util::get_shell_safe_zed_path()?;
|
||||
#[cfg(target_os = "windows")]
|
||||
let zed_path = std::env::current_exe()
|
||||
.context("finding current executable path for use in askpass")?;
|
||||
let zed_cli_path =
|
||||
util::get_shell_safe_zed_cli_path().context("getting zed-cli path for askpass")?;
|
||||
|
||||
let (askpass_kill_master_tx, askpass_kill_master_rx) = oneshot::channel::<()>();
|
||||
let mut kill_tx = Some(askpass_kill_master_tx);
|
||||
@@ -137,7 +134,7 @@ impl AskPassSession {
|
||||
});
|
||||
|
||||
// Create an askpass script that communicates back to this process.
|
||||
let askpass_script = generate_askpass_script(&zed_path, &askpass_socket);
|
||||
let askpass_script = generate_askpass_script(&zed_cli_path, &askpass_socket);
|
||||
fs::write(&askpass_script_path, askpass_script)
|
||||
.await
|
||||
.with_context(|| format!("creating askpass script at {askpass_script_path:?}"))?;
|
||||
@@ -254,10 +251,10 @@ pub fn main(socket: &str) {
|
||||
|
||||
#[inline]
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
fn generate_askpass_script(zed_path: &str, askpass_socket: &std::path::Path) -> String {
|
||||
fn generate_askpass_script(zed_cli_path: &str, askpass_socket: &std::path::Path) -> String {
|
||||
format!(
|
||||
"{shebang}\n{print_args} | {zed_exe} --askpass={askpass_socket} 2> /dev/null \n",
|
||||
zed_exe = zed_path,
|
||||
"{shebang}\n{print_args} | {zed_cli} --askpass={askpass_socket} 2> /dev/null \n",
|
||||
zed_cli = zed_cli_path,
|
||||
askpass_socket = askpass_socket.display(),
|
||||
print_args = "printf '%s\\0' \"$@\"",
|
||||
shebang = "#!/bin/sh",
|
||||
@@ -266,13 +263,13 @@ fn generate_askpass_script(zed_path: &str, askpass_socket: &std::path::Path) ->
|
||||
|
||||
#[inline]
|
||||
#[cfg(target_os = "windows")]
|
||||
fn generate_askpass_script(zed_path: &std::path::Path, askpass_socket: &std::path::Path) -> String {
|
||||
fn generate_askpass_script(zed_cli_path: &str, askpass_socket: &std::path::Path) -> String {
|
||||
format!(
|
||||
r#"
|
||||
$ErrorActionPreference = 'Stop';
|
||||
($args -join [char]0) | & "{zed_exe}" --askpass={askpass_socket} 2> $null
|
||||
($args -join [char]0) | & "{zed_cli}" --askpass={askpass_socket} 2> $null
|
||||
"#,
|
||||
zed_exe = zed_path.display(),
|
||||
zed_cli = zed_cli_path,
|
||||
askpass_socket = askpass_socket.display(),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -67,7 +67,7 @@ impl TryFrom<&str> for EncryptedPassword {
|
||||
unsafe {
|
||||
CryptProtectMemory(
|
||||
value.as_mut_ptr() as _,
|
||||
len,
|
||||
padded_length,
|
||||
CRYPTPROTECTMEMORY_SAME_PROCESS,
|
||||
)?;
|
||||
}
|
||||
@@ -97,7 +97,7 @@ pub(crate) fn decrypt(mut password: EncryptedPassword) -> Result<String> {
|
||||
unsafe {
|
||||
CryptUnprotectMemory(
|
||||
password.0.as_mut_ptr() as _,
|
||||
password.1,
|
||||
password.0.len().try_into()?,
|
||||
CRYPTPROTECTMEMORY_SAME_PROCESS,
|
||||
)
|
||||
.context("while decrypting a SSH password")?
|
||||
|
||||
@@ -6,7 +6,7 @@ use assistant_slash_command::{
|
||||
use fuzzy::{PathMatch, StringMatchCandidate};
|
||||
use gpui::{App, Entity, Task, WeakEntity};
|
||||
use language::{
|
||||
Anchor, BufferSnapshot, DiagnosticEntry, DiagnosticSeverity, LspAdapterDelegate,
|
||||
Anchor, BufferSnapshot, DiagnosticEntryRef, DiagnosticSeverity, LspAdapterDelegate,
|
||||
OffsetRangeExt, ToOffset,
|
||||
};
|
||||
use project::{DiagnosticSummary, PathMatchCandidateSet, Project};
|
||||
@@ -367,7 +367,7 @@ pub fn collect_buffer_diagnostics(
|
||||
|
||||
fn collect_diagnostic(
|
||||
output: &mut SlashCommandOutput,
|
||||
entry: &DiagnosticEntry<Anchor>,
|
||||
entry: &DiagnosticEntryRef<'_, Anchor>,
|
||||
snapshot: &BufferSnapshot,
|
||||
include_warnings: bool,
|
||||
) {
|
||||
|
||||
@@ -17,7 +17,7 @@ use editor::{
|
||||
use futures::StreamExt;
|
||||
use gpui::{
|
||||
Animation, AnimationExt, AnyWindowHandle, App, AppContext, AsyncApp, Entity, Task,
|
||||
TextStyleRefinement, WeakEntity, pulsating_between, px,
|
||||
TextStyleRefinement, WeakEntity, pulsating_between,
|
||||
};
|
||||
use indoc::formatdoc;
|
||||
use language::{
|
||||
@@ -1003,7 +1003,7 @@ impl ToolCard for EditFileToolCard {
|
||||
font_size: Some(
|
||||
TextSize::Small
|
||||
.rems(cx)
|
||||
.to_pixels(ThemeSettings::get_global(cx).agent_font_size(cx))
|
||||
.to_pixels(ThemeSettings::get_global(cx).agent_ui_font_size(cx))
|
||||
.into(),
|
||||
),
|
||||
..TextStyleRefinement::default()
|
||||
@@ -1102,7 +1102,7 @@ impl ToolCard for EditFileToolCard {
|
||||
.relative()
|
||||
.h_full()
|
||||
.when(!self.full_height_expanded, |editor_container| {
|
||||
editor_container.max_h(px(COLLAPSED_LINES as f32 * editor_line_height.0))
|
||||
editor_container.max_h(COLLAPSED_LINES as f32 * editor_line_height)
|
||||
})
|
||||
.overflow_hidden()
|
||||
.border_t_1()
|
||||
@@ -1161,7 +1161,7 @@ async fn build_buffer(
|
||||
LineEnding::normalize(&mut text);
|
||||
let text = Rope::from(text);
|
||||
let language = cx
|
||||
.update(|_cx| language_registry.language_for_file_path(&path))?
|
||||
.update(|_cx| language_registry.load_language_for_file_path(&path))?
|
||||
.await
|
||||
.ok();
|
||||
let buffer = cx.new(|cx| {
|
||||
|
||||
@@ -27,6 +27,7 @@ use std::{
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use task::{Shell, ShellBuilder};
|
||||
use terminal::terminal_settings::TerminalSettings;
|
||||
use terminal_view::TerminalView;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{CommonAnimationExt, Disclosure, Tooltip, prelude::*};
|
||||
@@ -119,17 +120,20 @@ impl Tool for TerminalTool {
|
||||
};
|
||||
|
||||
let cwd = working_dir.clone();
|
||||
let env = match &working_dir {
|
||||
let env = match &cwd {
|
||||
Some(dir) => project.update(cx, |project, cx| {
|
||||
project.directory_environment(dir.as_path().into(), cx)
|
||||
let shell = TerminalSettings::get_global(cx).shell.clone();
|
||||
project.directory_environment(&shell, dir.as_path().into(), cx)
|
||||
}),
|
||||
None => Task::ready(None).shared(),
|
||||
};
|
||||
let remote_shell = project.update(cx, |project, cx| {
|
||||
project
|
||||
.remote_client()
|
||||
.and_then(|r| r.read(cx).default_system_shell())
|
||||
});
|
||||
let shell = project
|
||||
.update(cx, |project, cx| {
|
||||
project
|
||||
.remote_client()
|
||||
.and_then(|r| r.read(cx).default_system_shell())
|
||||
})
|
||||
.unwrap_or_else(|| get_default_system_shell());
|
||||
|
||||
let env = cx.spawn(async move |_| {
|
||||
let mut env = env.await.unwrap_or_default();
|
||||
@@ -142,12 +146,9 @@ impl Tool for TerminalTool {
|
||||
let build_cmd = {
|
||||
let input_command = input.command.clone();
|
||||
move || {
|
||||
ShellBuilder::new(
|
||||
remote_shell.as_deref(),
|
||||
&Shell::Program(get_default_system_shell()),
|
||||
)
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(input_command.clone()), &[])
|
||||
ShellBuilder::new(&Shell::Program(shell))
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(input_command), &[])
|
||||
}
|
||||
};
|
||||
|
||||
@@ -476,7 +477,7 @@ impl ToolCard for TerminalToolCard {
|
||||
.as_ref()
|
||||
.cloned()
|
||||
.or_else(|| env::current_dir().ok())
|
||||
.map(|path| format!("{}", path.display()))
|
||||
.map(|path| path.display().to_string())
|
||||
.unwrap_or_else(|| "current directory".to_string());
|
||||
|
||||
let header = h_flex()
|
||||
|
||||
@@ -22,7 +22,6 @@ denoise = { path = "../denoise" }
|
||||
log.workspace = true
|
||||
parking_lot.workspace = true
|
||||
rodio = { workspace = true, features = [ "wav", "playback", "wav_output" ] }
|
||||
rubato = "0.16.2"
|
||||
serde.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
|
||||
@@ -1,17 +1,26 @@
|
||||
use std::{num::NonZero, time::Duration};
|
||||
use std::{
|
||||
num::NonZero,
|
||||
sync::{
|
||||
Arc, Mutex,
|
||||
atomic::{AtomicBool, Ordering},
|
||||
},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use crossbeam::queue::ArrayQueue;
|
||||
use denoise::{Denoiser, DenoiserError};
|
||||
use log::warn;
|
||||
use rodio::{ChannelCount, Sample, SampleRate, Source, conversions::ChannelCountConverter, nz};
|
||||
|
||||
use crate::rodio_ext::resample::FixedResampler;
|
||||
pub use replayable::{Replay, ReplayDurationTooShort, Replayable};
|
||||
|
||||
mod replayable;
|
||||
mod resample;
|
||||
use rodio::{
|
||||
ChannelCount, Sample, SampleRate, Source, conversions::SampleRateConverter, nz,
|
||||
source::UniformSourceIterator,
|
||||
};
|
||||
|
||||
const MAX_CHANNELS: usize = 8;
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error("Replay duration is too short must be >= 100ms")]
|
||||
pub struct ReplayDurationTooShort;
|
||||
|
||||
// These all require constant sources (so the span is infinitely long)
|
||||
// this is not guaranteed by rodio however we know it to be true in all our
|
||||
// applications. Rodio desperately needs a constant source concept.
|
||||
@@ -32,8 +41,8 @@ pub trait RodioExt: Source + Sized {
|
||||
self,
|
||||
channel_count: ChannelCount,
|
||||
sample_rate: SampleRate,
|
||||
) -> ConstantChannelCount<FixedResampler<Self>>;
|
||||
fn constant_samplerate(self, sample_rate: SampleRate) -> FixedResampler<Self>;
|
||||
) -> UniformSourceIterator<Self>;
|
||||
fn constant_samplerate(self, sample_rate: SampleRate) -> ConstantSampleRate<Self>;
|
||||
fn possibly_disconnected_channels_to_mono(self) -> ToMono<Self>;
|
||||
}
|
||||
|
||||
@@ -72,7 +81,38 @@ impl<S: Source> RodioExt for S {
|
||||
self,
|
||||
duration: Duration,
|
||||
) -> Result<(Replay, Replayable<Self>), ReplayDurationTooShort> {
|
||||
replayable::replayable(self, duration)
|
||||
if duration < Duration::from_millis(100) {
|
||||
return Err(ReplayDurationTooShort);
|
||||
}
|
||||
|
||||
let samples_per_second = self.sample_rate().get() as usize * self.channels().get() as usize;
|
||||
let samples_to_queue = duration.as_secs_f64() * samples_per_second as f64;
|
||||
let samples_to_queue =
|
||||
(samples_to_queue as usize).next_multiple_of(self.channels().get().into());
|
||||
|
||||
let chunk_size =
|
||||
(samples_per_second.div_ceil(10)).next_multiple_of(self.channels().get() as usize);
|
||||
let chunks_to_queue = samples_to_queue.div_ceil(chunk_size);
|
||||
|
||||
let is_active = Arc::new(AtomicBool::new(true));
|
||||
let queue = Arc::new(ReplayQueue::new(chunks_to_queue, chunk_size));
|
||||
Ok((
|
||||
Replay {
|
||||
rx: Arc::clone(&queue),
|
||||
buffer: Vec::new().into_iter(),
|
||||
sleep_duration: duration / 2,
|
||||
sample_rate: self.sample_rate(),
|
||||
channel_count: self.channels(),
|
||||
source_is_active: is_active.clone(),
|
||||
},
|
||||
Replayable {
|
||||
tx: queue,
|
||||
inner: self,
|
||||
buffer: Vec::with_capacity(chunk_size),
|
||||
chunk_size,
|
||||
is_active,
|
||||
},
|
||||
))
|
||||
}
|
||||
fn take_samples(self, n: usize) -> TakeSamples<S> {
|
||||
TakeSamples {
|
||||
@@ -88,37 +128,37 @@ impl<S: Source> RodioExt for S {
|
||||
self,
|
||||
channel_count: ChannelCount,
|
||||
sample_rate: SampleRate,
|
||||
) -> ConstantChannelCount<FixedResampler<Self>> {
|
||||
ConstantChannelCount::new(self.constant_samplerate(sample_rate), channel_count)
|
||||
) -> UniformSourceIterator<Self> {
|
||||
UniformSourceIterator::new(self, channel_count, sample_rate)
|
||||
}
|
||||
fn constant_samplerate(self, sample_rate: SampleRate) -> FixedResampler<Self> {
|
||||
FixedResampler::new(self, sample_rate)
|
||||
fn constant_samplerate(self, sample_rate: SampleRate) -> ConstantSampleRate<Self> {
|
||||
ConstantSampleRate::new(self, sample_rate)
|
||||
}
|
||||
fn possibly_disconnected_channels_to_mono(self) -> ToMono<Self> {
|
||||
ToMono::new(self)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ConstantChannelCount<S: Source> {
|
||||
inner: ChannelCountConverter<S>,
|
||||
pub struct ConstantSampleRate<S: Source> {
|
||||
inner: SampleRateConverter<S>,
|
||||
channels: ChannelCount,
|
||||
sample_rate: SampleRate,
|
||||
}
|
||||
|
||||
impl<S: Source> ConstantChannelCount<S> {
|
||||
fn new(source: S, target_channels: ChannelCount) -> Self {
|
||||
let input_channels = source.channels();
|
||||
let sample_rate = source.sample_rate();
|
||||
let inner = ChannelCountConverter::new(source, input_channels, target_channels);
|
||||
impl<S: Source> ConstantSampleRate<S> {
|
||||
fn new(source: S, target_rate: SampleRate) -> Self {
|
||||
let input_sample_rate = source.sample_rate();
|
||||
let channels = source.channels();
|
||||
let inner = SampleRateConverter::new(source, input_sample_rate, target_rate, channels);
|
||||
Self {
|
||||
sample_rate,
|
||||
inner,
|
||||
channels: target_channels,
|
||||
channels,
|
||||
sample_rate: target_rate,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Source> Iterator for ConstantChannelCount<S> {
|
||||
impl<S: Source> Iterator for ConstantSampleRate<S> {
|
||||
type Item = rodio::Sample;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
@@ -130,7 +170,7 @@ impl<S: Source> Iterator for ConstantChannelCount<S> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Source> Source for ConstantChannelCount<S> {
|
||||
impl<S: Source> Source for ConstantSampleRate<S> {
|
||||
fn current_span_len(&self) -> Option<usize> {
|
||||
None
|
||||
}
|
||||
@@ -267,6 +307,53 @@ impl<S: Source> Source for TakeSamples<S> {
|
||||
}
|
||||
}
|
||||
|
||||
/// constant source, only works on a single span
|
||||
#[derive(Debug)]
|
||||
struct ReplayQueue {
|
||||
inner: ArrayQueue<Vec<Sample>>,
|
||||
normal_chunk_len: usize,
|
||||
/// The last chunk in the queue may be smaller than
|
||||
/// the normal chunk size. This is always equal to the
|
||||
/// size of the last element in the queue.
|
||||
/// (so normally chunk_size)
|
||||
last_chunk: Mutex<Vec<Sample>>,
|
||||
}
|
||||
|
||||
impl ReplayQueue {
|
||||
fn new(queue_len: usize, chunk_size: usize) -> Self {
|
||||
Self {
|
||||
inner: ArrayQueue::new(queue_len),
|
||||
normal_chunk_len: chunk_size,
|
||||
last_chunk: Mutex::new(Vec::new()),
|
||||
}
|
||||
}
|
||||
/// Returns the length in samples
|
||||
fn len(&self) -> usize {
|
||||
self.inner.len().saturating_sub(1) * self.normal_chunk_len
|
||||
+ self
|
||||
.last_chunk
|
||||
.lock()
|
||||
.expect("Self::push_last can not poison this lock")
|
||||
.len()
|
||||
}
|
||||
|
||||
fn pop(&self) -> Option<Vec<Sample>> {
|
||||
self.inner.pop() // removes element that was inserted first
|
||||
}
|
||||
|
||||
fn push_last(&self, mut samples: Vec<Sample>) {
|
||||
let mut last_chunk = self
|
||||
.last_chunk
|
||||
.lock()
|
||||
.expect("Self::len can not poison this lock");
|
||||
std::mem::swap(&mut *last_chunk, &mut samples);
|
||||
}
|
||||
|
||||
fn push_normal(&self, samples: Vec<Sample>) {
|
||||
let _pushed_out_of_ringbuf = self.inner.force_push(samples);
|
||||
}
|
||||
}
|
||||
|
||||
/// constant source, only works on a single span
|
||||
pub struct ProcessBuffer<const N: usize, S, F>
|
||||
where
|
||||
@@ -400,15 +487,147 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
/// constant source, only works on a single span
|
||||
#[derive(Debug)]
|
||||
pub struct Replayable<S: Source> {
|
||||
inner: S,
|
||||
buffer: Vec<Sample>,
|
||||
chunk_size: usize,
|
||||
tx: Arc<ReplayQueue>,
|
||||
is_active: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl<S: Source> Iterator for Replayable<S> {
|
||||
type Item = Sample;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if let Some(sample) = self.inner.next() {
|
||||
self.buffer.push(sample);
|
||||
// If the buffer is full send it
|
||||
if self.buffer.len() == self.chunk_size {
|
||||
self.tx.push_normal(std::mem::take(&mut self.buffer));
|
||||
}
|
||||
Some(sample)
|
||||
} else {
|
||||
let last_chunk = std::mem::take(&mut self.buffer);
|
||||
self.tx.push_last(last_chunk);
|
||||
self.is_active.store(false, Ordering::Relaxed);
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.inner.size_hint()
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Source> Source for Replayable<S> {
|
||||
fn current_span_len(&self) -> Option<usize> {
|
||||
self.inner.current_span_len()
|
||||
}
|
||||
|
||||
fn channels(&self) -> ChannelCount {
|
||||
self.inner.channels()
|
||||
}
|
||||
|
||||
fn sample_rate(&self) -> SampleRate {
|
||||
self.inner.sample_rate()
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> Option<Duration> {
|
||||
self.inner.total_duration()
|
||||
}
|
||||
}
|
||||
|
||||
/// constant source, only works on a single span
|
||||
#[derive(Debug)]
|
||||
pub struct Replay {
|
||||
rx: Arc<ReplayQueue>,
|
||||
buffer: std::vec::IntoIter<Sample>,
|
||||
sleep_duration: Duration,
|
||||
sample_rate: SampleRate,
|
||||
channel_count: ChannelCount,
|
||||
source_is_active: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl Replay {
|
||||
pub fn source_is_active(&self) -> bool {
|
||||
// - source could return None and not drop
|
||||
// - source could be dropped before returning None
|
||||
self.source_is_active.load(Ordering::Relaxed) && Arc::strong_count(&self.rx) < 2
|
||||
}
|
||||
|
||||
/// Duration of what is in the buffer and can be returned without blocking.
|
||||
pub fn duration_ready(&self) -> Duration {
|
||||
let samples_per_second = self.channels().get() as u32 * self.sample_rate().get();
|
||||
|
||||
let seconds_queued = self.samples_ready() as f64 / samples_per_second as f64;
|
||||
Duration::from_secs_f64(seconds_queued)
|
||||
}
|
||||
|
||||
/// Number of samples in the buffer and can be returned without blocking.
|
||||
pub fn samples_ready(&self) -> usize {
|
||||
self.rx.len() + self.buffer.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for Replay {
|
||||
type Item = Sample;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if let Some(sample) = self.buffer.next() {
|
||||
return Some(sample);
|
||||
}
|
||||
|
||||
loop {
|
||||
if let Some(new_buffer) = self.rx.pop() {
|
||||
self.buffer = new_buffer.into_iter();
|
||||
return self.buffer.next();
|
||||
}
|
||||
|
||||
if !self.source_is_active() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// The queue does not support blocking on a next item. We want this queue as it
|
||||
// is quite fast and provides a fixed size. We know how many samples are in a
|
||||
// buffer so if we do not get one now we must be getting one after `sleep_duration`.
|
||||
std::thread::sleep(self.sleep_duration);
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
((self.rx.len() + self.buffer.len()), None)
|
||||
}
|
||||
}
|
||||
|
||||
impl Source for Replay {
|
||||
fn current_span_len(&self) -> Option<usize> {
|
||||
None // source is not compatible with spans
|
||||
}
|
||||
|
||||
fn channels(&self) -> ChannelCount {
|
||||
self.channel_count
|
||||
}
|
||||
|
||||
fn sample_rate(&self) -> SampleRate {
|
||||
self.sample_rate
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> Option<Duration> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use rodio::{nz, static_buffer::StaticSamplesBuffer};
|
||||
|
||||
use super::*;
|
||||
|
||||
pub const SAMPLES: [Sample; 5] = [0.0, 1.0, 2.0, 3.0, 4.0];
|
||||
const SAMPLES: [Sample; 5] = [0.0, 1.0, 2.0, 3.0, 4.0];
|
||||
|
||||
pub fn test_source() -> StaticSamplesBuffer {
|
||||
fn test_source() -> StaticSamplesBuffer {
|
||||
StaticSamplesBuffer::new(nz!(1), nz!(1), &SAMPLES)
|
||||
}
|
||||
|
||||
@@ -471,4 +690,74 @@ mod tests {
|
||||
assert_eq!(yielded, SAMPLES.len())
|
||||
}
|
||||
}
|
||||
|
||||
mod instant_replay {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn continues_after_history() {
|
||||
let input = test_source();
|
||||
|
||||
let (mut replay, mut source) = input
|
||||
.replayable(Duration::from_secs(3))
|
||||
.expect("longer than 100ms");
|
||||
|
||||
source.by_ref().take(3).count();
|
||||
let yielded: Vec<Sample> = replay.by_ref().take(3).collect();
|
||||
assert_eq!(&yielded, &SAMPLES[0..3],);
|
||||
|
||||
source.count();
|
||||
let yielded: Vec<Sample> = replay.collect();
|
||||
assert_eq!(&yielded, &SAMPLES[3..5],);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn keeps_only_latest() {
|
||||
let input = test_source();
|
||||
|
||||
let (mut replay, mut source) = input
|
||||
.replayable(Duration::from_secs(2))
|
||||
.expect("longer than 100ms");
|
||||
|
||||
source.by_ref().take(5).count(); // get all items but do not end the source
|
||||
let yielded: Vec<Sample> = replay.by_ref().take(2).collect();
|
||||
assert_eq!(&yielded, &SAMPLES[3..5]);
|
||||
source.count(); // exhaust source
|
||||
assert_eq!(replay.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn keeps_correct_amount_of_seconds() {
|
||||
let input = StaticSamplesBuffer::new(nz!(1), nz!(16_000), &[0.0; 40_000]);
|
||||
|
||||
let (replay, mut source) = input
|
||||
.replayable(Duration::from_secs(2))
|
||||
.expect("longer than 100ms");
|
||||
|
||||
// exhaust but do not yet end source
|
||||
source.by_ref().take(40_000).count();
|
||||
|
||||
// take all samples we can without blocking
|
||||
let ready = replay.samples_ready();
|
||||
let n_yielded = replay.take_samples(ready).count();
|
||||
|
||||
let max = source.sample_rate().get() * source.channels().get() as u32 * 2;
|
||||
let margin = 16_000 / 10; // 100ms
|
||||
assert!(n_yielded as u32 >= max - margin);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn samples_ready() {
|
||||
let input = StaticSamplesBuffer::new(nz!(1), nz!(16_000), &[0.0; 40_000]);
|
||||
let (mut replay, source) = input
|
||||
.replayable(Duration::from_secs(2))
|
||||
.expect("longer than 100ms");
|
||||
assert_eq!(replay.by_ref().samples_ready(), 0);
|
||||
|
||||
source.take(8000).count(); // half a second
|
||||
let margin = 16_000 / 10; // 100ms
|
||||
let ready = replay.samples_ready();
|
||||
assert!(ready >= 8000 - margin);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,308 +0,0 @@
|
||||
use std::{
|
||||
sync::{
|
||||
Arc, Mutex,
|
||||
atomic::{AtomicBool, Ordering},
|
||||
},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use crossbeam::queue::ArrayQueue;
|
||||
use rodio::{ChannelCount, Sample, SampleRate, Source};
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error("Replay duration is too short must be >= 100ms")]
|
||||
pub struct ReplayDurationTooShort;
|
||||
|
||||
pub fn replayable<S: Source>(
|
||||
source: S,
|
||||
duration: Duration,
|
||||
) -> Result<(Replay, Replayable<S>), ReplayDurationTooShort> {
|
||||
if duration < Duration::from_millis(100) {
|
||||
return Err(ReplayDurationTooShort);
|
||||
}
|
||||
|
||||
let samples_per_second = source.sample_rate().get() as usize * source.channels().get() as usize;
|
||||
let samples_to_queue = duration.as_secs_f64() * samples_per_second as f64;
|
||||
let samples_to_queue =
|
||||
(samples_to_queue as usize).next_multiple_of(source.channels().get().into());
|
||||
|
||||
let chunk_size =
|
||||
(samples_per_second.div_ceil(10)).next_multiple_of(source.channels().get() as usize);
|
||||
let chunks_to_queue = samples_to_queue.div_ceil(chunk_size);
|
||||
|
||||
let is_active = Arc::new(AtomicBool::new(true));
|
||||
let queue = Arc::new(ReplayQueue::new(chunks_to_queue, chunk_size));
|
||||
Ok((
|
||||
Replay {
|
||||
rx: Arc::clone(&queue),
|
||||
buffer: Vec::new().into_iter(),
|
||||
sleep_duration: duration / 2,
|
||||
sample_rate: source.sample_rate(),
|
||||
channel_count: source.channels(),
|
||||
source_is_active: is_active.clone(),
|
||||
},
|
||||
Replayable {
|
||||
tx: queue,
|
||||
inner: source,
|
||||
buffer: Vec::with_capacity(chunk_size),
|
||||
chunk_size,
|
||||
is_active,
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
/// constant source, only works on a single span
|
||||
#[derive(Debug)]
|
||||
struct ReplayQueue {
|
||||
inner: ArrayQueue<Vec<Sample>>,
|
||||
normal_chunk_len: usize,
|
||||
/// The last chunk in the queue may be smaller than
|
||||
/// the normal chunk size. This is always equal to the
|
||||
/// size of the last element in the queue.
|
||||
/// (so normally chunk_size)
|
||||
last_chunk: Mutex<Vec<Sample>>,
|
||||
}
|
||||
|
||||
impl ReplayQueue {
|
||||
fn new(queue_len: usize, chunk_size: usize) -> Self {
|
||||
Self {
|
||||
inner: ArrayQueue::new(queue_len),
|
||||
normal_chunk_len: chunk_size,
|
||||
last_chunk: Mutex::new(Vec::new()),
|
||||
}
|
||||
}
|
||||
/// Returns the length in samples
|
||||
fn len(&self) -> usize {
|
||||
self.inner.len().saturating_sub(1) * self.normal_chunk_len
|
||||
+ self
|
||||
.last_chunk
|
||||
.lock()
|
||||
.expect("Self::push_last can not poison this lock")
|
||||
.len()
|
||||
}
|
||||
|
||||
fn pop(&self) -> Option<Vec<Sample>> {
|
||||
self.inner.pop() // removes element that was inserted first
|
||||
}
|
||||
|
||||
fn push_last(&self, mut samples: Vec<Sample>) {
|
||||
let mut last_chunk = self
|
||||
.last_chunk
|
||||
.lock()
|
||||
.expect("Self::len can not poison this lock");
|
||||
std::mem::swap(&mut *last_chunk, &mut samples);
|
||||
}
|
||||
|
||||
fn push_normal(&self, samples: Vec<Sample>) {
|
||||
let _pushed_out_of_ringbuf = self.inner.force_push(samples);
|
||||
}
|
||||
}
|
||||
|
||||
/// constant source, only works on a single span
|
||||
#[derive(Debug)]
|
||||
pub struct Replayable<S: Source> {
|
||||
inner: S,
|
||||
buffer: Vec<Sample>,
|
||||
chunk_size: usize,
|
||||
tx: Arc<ReplayQueue>,
|
||||
is_active: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl<S: Source> Iterator for Replayable<S> {
|
||||
type Item = Sample;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if let Some(sample) = self.inner.next() {
|
||||
self.buffer.push(sample);
|
||||
// If the buffer is full send it
|
||||
if self.buffer.len() == self.chunk_size {
|
||||
self.tx.push_normal(std::mem::take(&mut self.buffer));
|
||||
}
|
||||
Some(sample)
|
||||
} else {
|
||||
let last_chunk = std::mem::take(&mut self.buffer);
|
||||
self.tx.push_last(last_chunk);
|
||||
self.is_active.store(false, Ordering::Relaxed);
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.inner.size_hint()
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Source> Source for Replayable<S> {
|
||||
fn current_span_len(&self) -> Option<usize> {
|
||||
self.inner.current_span_len()
|
||||
}
|
||||
|
||||
fn channels(&self) -> ChannelCount {
|
||||
self.inner.channels()
|
||||
}
|
||||
|
||||
fn sample_rate(&self) -> SampleRate {
|
||||
self.inner.sample_rate()
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> Option<Duration> {
|
||||
self.inner.total_duration()
|
||||
}
|
||||
}
|
||||
|
||||
/// constant source, only works on a single span
|
||||
#[derive(Debug)]
|
||||
pub struct Replay {
|
||||
rx: Arc<ReplayQueue>,
|
||||
buffer: std::vec::IntoIter<Sample>,
|
||||
sleep_duration: Duration,
|
||||
sample_rate: SampleRate,
|
||||
channel_count: ChannelCount,
|
||||
source_is_active: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl Replay {
|
||||
pub fn source_is_active(&self) -> bool {
|
||||
// - source could return None and not drop
|
||||
// - source could be dropped before returning None
|
||||
self.source_is_active.load(Ordering::Relaxed) && Arc::strong_count(&self.rx) < 2
|
||||
}
|
||||
|
||||
/// Duration of what is in the buffer and can be returned without blocking.
|
||||
pub fn duration_ready(&self) -> Duration {
|
||||
let samples_per_second = self.channels().get() as u32 * self.sample_rate().get();
|
||||
|
||||
let seconds_queued = self.samples_ready() as f64 / samples_per_second as f64;
|
||||
Duration::from_secs_f64(seconds_queued)
|
||||
}
|
||||
|
||||
/// Number of samples in the buffer and can be returned without blocking.
|
||||
pub fn samples_ready(&self) -> usize {
|
||||
self.rx.len() + self.buffer.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for Replay {
|
||||
type Item = Sample;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if let Some(sample) = self.buffer.next() {
|
||||
return Some(sample);
|
||||
}
|
||||
|
||||
loop {
|
||||
if let Some(new_buffer) = self.rx.pop() {
|
||||
self.buffer = new_buffer.into_iter();
|
||||
return self.buffer.next();
|
||||
}
|
||||
|
||||
if !self.source_is_active() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// The queue does not support blocking on a next item. We want this queue as it
|
||||
// is quite fast and provides a fixed size. We know how many samples are in a
|
||||
// buffer so if we do not get one now we must be getting one after `sleep_duration`.
|
||||
std::thread::sleep(self.sleep_duration);
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
((self.rx.len() + self.buffer.len()), None)
|
||||
}
|
||||
}
|
||||
|
||||
impl Source for Replay {
|
||||
fn current_span_len(&self) -> Option<usize> {
|
||||
None // source is not compatible with spans
|
||||
}
|
||||
|
||||
fn channels(&self) -> ChannelCount {
|
||||
self.channel_count
|
||||
}
|
||||
|
||||
fn sample_rate(&self) -> SampleRate {
|
||||
self.sample_rate
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> Option<Duration> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use rodio::{nz, static_buffer::StaticSamplesBuffer};
|
||||
|
||||
use super::*;
|
||||
use crate::{
|
||||
RodioExt,
|
||||
rodio_ext::tests::{SAMPLES, test_source},
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn continues_after_history() {
|
||||
let input = test_source();
|
||||
|
||||
let (mut replay, mut source) = input
|
||||
.replayable(Duration::from_secs(3))
|
||||
.expect("longer than 100ms");
|
||||
|
||||
source.by_ref().take(3).count();
|
||||
let yielded: Vec<Sample> = replay.by_ref().take(3).collect();
|
||||
assert_eq!(&yielded, &SAMPLES[0..3],);
|
||||
|
||||
source.count();
|
||||
let yielded: Vec<Sample> = replay.collect();
|
||||
assert_eq!(&yielded, &SAMPLES[3..5],);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn keeps_only_latest() {
|
||||
let input = test_source();
|
||||
|
||||
let (mut replay, mut source) = input
|
||||
.replayable(Duration::from_secs(2))
|
||||
.expect("longer than 100ms");
|
||||
|
||||
source.by_ref().take(5).count(); // get all items but do not end the source
|
||||
let yielded: Vec<Sample> = replay.by_ref().take(2).collect();
|
||||
assert_eq!(&yielded, &SAMPLES[3..5]);
|
||||
source.count(); // exhaust source
|
||||
assert_eq!(replay.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn keeps_correct_amount_of_seconds() {
|
||||
let input = StaticSamplesBuffer::new(nz!(1), nz!(16_000), &[0.0; 40_000]);
|
||||
|
||||
let (replay, mut source) = input
|
||||
.replayable(Duration::from_secs(2))
|
||||
.expect("longer than 100ms");
|
||||
|
||||
// exhaust but do not yet end source
|
||||
source.by_ref().take(40_000).count();
|
||||
|
||||
// take all samples we can without blocking
|
||||
let ready = replay.samples_ready();
|
||||
let n_yielded = replay.take_samples(ready).count();
|
||||
|
||||
let max = source.sample_rate().get() * source.channels().get() as u32 * 2;
|
||||
let margin = 16_000 / 10; // 100ms
|
||||
assert!(n_yielded as u32 >= max - margin);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn samples_ready() {
|
||||
let input = StaticSamplesBuffer::new(nz!(1), nz!(16_000), &[0.0; 40_000]);
|
||||
let (mut replay, source) = input
|
||||
.replayable(Duration::from_secs(2))
|
||||
.expect("longer than 100ms");
|
||||
assert_eq!(replay.by_ref().samples_ready(), 0);
|
||||
|
||||
source.take(8000).count(); // half a second
|
||||
let margin = 16_000 / 10; // 100ms
|
||||
let ready = replay.samples_ready();
|
||||
assert!(ready >= 8000 - margin);
|
||||
}
|
||||
}
|
||||
@@ -1,98 +0,0 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use rodio::{Sample, SampleRate, Source};
|
||||
use rubato::{FftFixedInOut, Resampler};
|
||||
|
||||
pub struct FixedResampler<S> {
|
||||
input: S,
|
||||
next_channel: usize,
|
||||
next_frame: usize,
|
||||
output_buffer: Vec<Vec<Sample>>,
|
||||
input_buffer: Vec<Vec<Sample>>,
|
||||
target_sample_rate: SampleRate,
|
||||
resampler: FftFixedInOut<Sample>,
|
||||
}
|
||||
|
||||
impl<S: Source> FixedResampler<S> {
|
||||
pub fn new(input: S, target_sample_rate: SampleRate) -> Self {
|
||||
let chunk_size_in =
|
||||
Duration::from_millis(50).as_secs_f32() * input.sample_rate().get() as f32;
|
||||
let chunk_size_in = chunk_size_in.ceil() as usize;
|
||||
|
||||
let resampler = FftFixedInOut::new(
|
||||
input.sample_rate().get() as usize,
|
||||
target_sample_rate.get() as usize,
|
||||
chunk_size_in,
|
||||
input.channels().get() as usize,
|
||||
)
|
||||
.expect(
|
||||
"sample rates are non zero, and we are not changing it so there is no resample ratio",
|
||||
);
|
||||
|
||||
Self {
|
||||
next_channel: 0,
|
||||
next_frame: 0,
|
||||
output_buffer: resampler.output_buffer_allocate(true),
|
||||
input_buffer: resampler.input_buffer_allocate(false),
|
||||
target_sample_rate,
|
||||
resampler,
|
||||
input,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Source> Source for FixedResampler<S> {
|
||||
fn current_span_len(&self) -> Option<usize> {
|
||||
None
|
||||
}
|
||||
|
||||
fn channels(&self) -> rodio::ChannelCount {
|
||||
self.input.channels()
|
||||
}
|
||||
|
||||
fn sample_rate(&self) -> rodio::SampleRate {
|
||||
self.target_sample_rate
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> Option<std::time::Duration> {
|
||||
self.input.total_duration()
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Source> FixedResampler<S> {
|
||||
fn next_sample(&mut self) -> Option<Sample> {
|
||||
let sample = self.output_buffer[self.next_channel]
|
||||
.get(self.next_frame)
|
||||
.copied();
|
||||
self.next_channel = (self.next_channel + 1) % self.input.channels().get() as usize;
|
||||
self.next_frame += 1;
|
||||
|
||||
sample
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Source> Iterator for FixedResampler<S> {
|
||||
type Item = Sample;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if let Some(sample) = self.next_sample() {
|
||||
return Some(sample);
|
||||
}
|
||||
|
||||
for input_channel in &mut self.input_buffer {
|
||||
input_channel.clear();
|
||||
}
|
||||
|
||||
for _ in 0..self.resampler.input_frames_next() {
|
||||
for input_channel in &mut self.input_buffer {
|
||||
input_channel.push(self.input.next()?);
|
||||
}
|
||||
}
|
||||
|
||||
self.resampler
|
||||
.process_into_buffer(&mut self.input_buffer, &mut self.output_buffer, None).expect("Input and output buffer channels are correct as they have been set by the resampler. The buffer for each channel is the same length. The buffer length is what is requested the resampler.");
|
||||
|
||||
self.next_frame = 0;
|
||||
self.next_sample()
|
||||
}
|
||||
}
|
||||
@@ -38,6 +38,20 @@ pub(crate) const JOBS: &[Job] = &[
|
||||
std::fs::remove_file(&zed_wsl)
|
||||
.context(format!("Failed to remove old file {}", zed_wsl.display()))
|
||||
},
|
||||
|app_dir| {
|
||||
let open_console = app_dir.join("OpenConsole.exe");
|
||||
log::info!("Removing old file: {}", open_console.display());
|
||||
std::fs::remove_file(&open_console).context(format!(
|
||||
"Failed to remove old file {}",
|
||||
open_console.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let conpty = app_dir.join("conpty.dll");
|
||||
log::info!("Removing old file: {}", conpty.display());
|
||||
std::fs::remove_file(&conpty)
|
||||
.context(format!("Failed to remove old file {}", conpty.display()))
|
||||
},
|
||||
// Copy new files
|
||||
|app_dir| {
|
||||
let zed_executable_source = app_dir.join("install\\Zed.exe");
|
||||
@@ -87,6 +101,38 @@ pub(crate) const JOBS: &[Job] = &[
|
||||
zed_wsl_dest.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let open_console_source = app_dir.join("install\\OpenConsole.exe");
|
||||
let open_console_dest = app_dir.join("OpenConsole.exe");
|
||||
log::info!(
|
||||
"Copying new file {} to {}",
|
||||
open_console_source.display(),
|
||||
open_console_dest.display()
|
||||
);
|
||||
std::fs::copy(&open_console_source, &open_console_dest)
|
||||
.map(|_| ())
|
||||
.context(format!(
|
||||
"Failed to copy new file {} to {}",
|
||||
open_console_source.display(),
|
||||
open_console_dest.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let conpty_source = app_dir.join("install\\conpty.dll");
|
||||
let conpty_dest = app_dir.join("conpty.dll");
|
||||
log::info!(
|
||||
"Copying new file {} to {}",
|
||||
conpty_source.display(),
|
||||
conpty_dest.display()
|
||||
);
|
||||
std::fs::copy(&conpty_source, &conpty_dest)
|
||||
.map(|_| ())
|
||||
.context(format!(
|
||||
"Failed to copy new file {} to {}",
|
||||
conpty_source.display(),
|
||||
conpty_dest.display()
|
||||
))
|
||||
},
|
||||
// Clean up installer folder and updates folder
|
||||
|app_dir| {
|
||||
let updates_folder = app_dir.join("updates");
|
||||
|
||||
@@ -22,7 +22,6 @@ pub struct BedrockModelCacheConfiguration {
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
|
||||
pub enum Model {
|
||||
// Anthropic models (already included)
|
||||
#[default]
|
||||
#[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")]
|
||||
ClaudeSonnet4,
|
||||
#[serde(
|
||||
@@ -30,6 +29,14 @@ pub enum Model {
|
||||
alias = "claude-sonnet-4-thinking-latest"
|
||||
)]
|
||||
ClaudeSonnet4Thinking,
|
||||
#[default]
|
||||
#[serde(rename = "claude-sonnet-4-5", alias = "claude-sonnet-4-5-latest")]
|
||||
ClaudeSonnet4_5,
|
||||
#[serde(
|
||||
rename = "claude-sonnet-4-5-thinking",
|
||||
alias = "claude-sonnet-4-5-thinking-latest"
|
||||
)]
|
||||
ClaudeSonnet4_5Thinking,
|
||||
#[serde(rename = "claude-opus-4", alias = "claude-opus-4-latest")]
|
||||
ClaudeOpus4,
|
||||
#[serde(rename = "claude-opus-4-1", alias = "claude-opus-4-1-latest")]
|
||||
@@ -144,6 +151,14 @@ impl Model {
|
||||
Ok(Self::Claude3_7Sonnet)
|
||||
} else if id.starts_with("claude-3-7-sonnet-thinking") {
|
||||
Ok(Self::Claude3_7SonnetThinking)
|
||||
} else if id.starts_with("claude-sonnet-4-5-thinking") {
|
||||
Ok(Self::ClaudeSonnet4_5Thinking)
|
||||
} else if id.starts_with("claude-sonnet-4-5") {
|
||||
Ok(Self::ClaudeSonnet4_5)
|
||||
} else if id.starts_with("claude-sonnet-4-thinking") {
|
||||
Ok(Self::ClaudeSonnet4Thinking)
|
||||
} else if id.starts_with("claude-sonnet-4") {
|
||||
Ok(Self::ClaudeSonnet4)
|
||||
} else {
|
||||
anyhow::bail!("invalid model id {id}");
|
||||
}
|
||||
@@ -153,6 +168,8 @@ impl Model {
|
||||
match self {
|
||||
Model::ClaudeSonnet4 => "claude-sonnet-4",
|
||||
Model::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking",
|
||||
Model::ClaudeSonnet4_5 => "claude-sonnet-4-5",
|
||||
Model::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-thinking",
|
||||
Model::ClaudeOpus4 => "claude-opus-4",
|
||||
Model::ClaudeOpus4_1 => "claude-opus-4-1",
|
||||
Model::ClaudeOpus4Thinking => "claude-opus-4-thinking",
|
||||
@@ -214,6 +231,9 @@ impl Model {
|
||||
Model::ClaudeSonnet4 | Model::ClaudeSonnet4Thinking => {
|
||||
"anthropic.claude-sonnet-4-20250514-v1:0"
|
||||
}
|
||||
Model::ClaudeSonnet4_5 | Model::ClaudeSonnet4_5Thinking => {
|
||||
"anthropic.claude-sonnet-4-5-20250929-v1:0"
|
||||
}
|
||||
Model::ClaudeOpus4 | Model::ClaudeOpus4Thinking => {
|
||||
"anthropic.claude-opus-4-20250514-v1:0"
|
||||
}
|
||||
@@ -277,6 +297,8 @@ impl Model {
|
||||
match self {
|
||||
Self::ClaudeSonnet4 => "Claude Sonnet 4",
|
||||
Self::ClaudeSonnet4Thinking => "Claude Sonnet 4 Thinking",
|
||||
Self::ClaudeSonnet4_5 => "Claude Sonnet 4.5",
|
||||
Self::ClaudeSonnet4_5Thinking => "Claude Sonnet 4.5 Thinking",
|
||||
Self::ClaudeOpus4 => "Claude Opus 4",
|
||||
Self::ClaudeOpus4_1 => "Claude Opus 4.1",
|
||||
Self::ClaudeOpus4Thinking => "Claude Opus 4 Thinking",
|
||||
@@ -346,6 +368,8 @@ impl Model {
|
||||
| Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4_1
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeOpus4_1Thinking => 200_000,
|
||||
Self::AmazonNovaPremier => 1_000_000,
|
||||
@@ -361,6 +385,7 @@ impl Model {
|
||||
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096,
|
||||
Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => 128_000,
|
||||
Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => 64_000,
|
||||
Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking => 64_000,
|
||||
Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeOpus4_1
|
||||
@@ -385,7 +410,9 @@ impl Model {
|
||||
| Self::ClaudeOpus4_1
|
||||
| Self::ClaudeOpus4_1Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking => 1.0,
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking => 1.0,
|
||||
Self::Custom {
|
||||
default_temperature,
|
||||
..
|
||||
@@ -409,6 +436,8 @@ impl Model {
|
||||
| Self::ClaudeOpus4_1Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::Claude3_5Haiku => true,
|
||||
|
||||
// Amazon Nova models (all support tool use)
|
||||
@@ -439,6 +468,8 @@ impl Model {
|
||||
| Self::Claude3_7SonnetThinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeOpus4_1
|
||||
@@ -488,9 +519,11 @@ impl Model {
|
||||
Model::Claude3_7SonnetThinking => BedrockModelMode::Thinking {
|
||||
budget_tokens: Some(4096),
|
||||
},
|
||||
Model::ClaudeSonnet4Thinking => BedrockModelMode::Thinking {
|
||||
budget_tokens: Some(4096),
|
||||
},
|
||||
Model::ClaudeSonnet4Thinking | Model::ClaudeSonnet4_5Thinking => {
|
||||
BedrockModelMode::Thinking {
|
||||
budget_tokens: Some(4096),
|
||||
}
|
||||
}
|
||||
Model::ClaudeOpus4Thinking | Model::ClaudeOpus4_1Thinking => {
|
||||
BedrockModelMode::Thinking {
|
||||
budget_tokens: Some(4096),
|
||||
@@ -542,6 +575,8 @@ impl Model {
|
||||
| Model::Claude3_7SonnetThinking
|
||||
| Model::ClaudeSonnet4
|
||||
| Model::ClaudeSonnet4Thinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking
|
||||
| Model::ClaudeOpus4
|
||||
| Model::ClaudeOpus4Thinking
|
||||
| Model::ClaudeOpus4_1
|
||||
@@ -575,6 +610,8 @@ impl Model {
|
||||
| Model::Claude3_7SonnetThinking
|
||||
| Model::ClaudeSonnet4
|
||||
| Model::ClaudeSonnet4Thinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking
|
||||
| Model::Claude3Haiku
|
||||
| Model::Claude3Sonnet
|
||||
| Model::MetaLlama321BInstructV1
|
||||
@@ -592,7 +629,9 @@ impl Model {
|
||||
| Model::Claude3_7Sonnet
|
||||
| Model::Claude3_7SonnetThinking
|
||||
| Model::ClaudeSonnet4
|
||||
| Model::ClaudeSonnet4Thinking,
|
||||
| Model::ClaudeSonnet4Thinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking,
|
||||
"apac",
|
||||
) => Ok(format!("{}.{}", region_group, model_id)),
|
||||
|
||||
@@ -631,6 +670,10 @@ mod tests {
|
||||
Model::ClaudeSonnet4.cross_region_inference_id("eu-west-1")?,
|
||||
"eu.anthropic.claude-sonnet-4-20250514-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1")?,
|
||||
"eu.anthropic.claude-sonnet-4-5-20250929-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::Claude3Sonnet.cross_region_inference_id("eu-west-1")?,
|
||||
"eu.anthropic.claude-3-sonnet-20240229-v1:0"
|
||||
|
||||
@@ -22,6 +22,7 @@ default = []
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
askpass.workspace = true
|
||||
clap.workspace = true
|
||||
collections.workspace = true
|
||||
ipc-channel = "0.19"
|
||||
|
||||
@@ -116,6 +116,11 @@ struct Args {
|
||||
))]
|
||||
#[arg(long)]
|
||||
uninstall: bool,
|
||||
|
||||
/// Used for SSH/Git password authentication, to remove the need for netcat as a dependency,
|
||||
/// by having Zed act like netcat communicating over a Unix socket.
|
||||
#[arg(long, hide = true)]
|
||||
askpass: Option<String>,
|
||||
}
|
||||
|
||||
fn parse_path_with_position(argument_str: &str) -> anyhow::Result<String> {
|
||||
@@ -203,6 +208,12 @@ fn main() -> Result<()> {
|
||||
}
|
||||
let args = Args::parse();
|
||||
|
||||
// `zed --askpass` Makes zed operate in nc/netcat mode for use with askpass
|
||||
if let Some(socket) = &args.askpass {
|
||||
askpass::main(socket);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Set custom data directory before any path operations
|
||||
let user_data_dir = args.user_data_dir.clone();
|
||||
if let Some(dir) = &user_data_dir {
|
||||
|
||||
@@ -17,5 +17,6 @@ cloud_llm_client.workspace = true
|
||||
indoc.workspace = true
|
||||
ordered-float.workspace = true
|
||||
rustc-hash.workspace = true
|
||||
serde.workspace = true
|
||||
strum.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
@@ -5,6 +5,7 @@ use cloud_llm_client::predict_edits_v3::{self, Event, PromptFormat, ReferencedDe
|
||||
use indoc::indoc;
|
||||
use ordered_float::OrderedFloat;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use serde::Serialize;
|
||||
use std::fmt::Write;
|
||||
use std::sync::Arc;
|
||||
use std::{cmp::Reverse, collections::BinaryHeap, ops::Range, path::Path};
|
||||
@@ -75,7 +76,7 @@ pub enum DeclarationStyle {
|
||||
Declaration,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct SectionLabels {
|
||||
pub excerpt_index: usize,
|
||||
pub section_ranges: Vec<(Arc<Path>, Range<usize>)>,
|
||||
|
||||
@@ -20,7 +20,5 @@ LLM_DATABASE_MAX_CONNECTIONS = 5
|
||||
LLM_API_SECRET = "llm-secret"
|
||||
OPENAI_API_KEY = "llm-secret"
|
||||
|
||||
# SLACK_PANICS_WEBHOOK = ""
|
||||
|
||||
# RUST_LOG=info
|
||||
# LOG_JSON=true
|
||||
|
||||
@@ -46,7 +46,6 @@ rand.workspace = true
|
||||
reqwest = { version = "0.11", features = ["json"] }
|
||||
reqwest_client.workspace = true
|
||||
rpc.workspace = true
|
||||
rustc-demangle.workspace = true
|
||||
scrypt = "0.11"
|
||||
sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
|
||||
semantic_version.workspace = true
|
||||
|
||||
@@ -214,11 +214,6 @@ spec:
|
||||
secretKeyRef:
|
||||
name: blob-store
|
||||
key: bucket
|
||||
- name: SLACK_PANICS_WEBHOOK
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: slack
|
||||
key: panics_webhook
|
||||
- name: COMPLETE_WITH_LANGUAGE_MODEL_RATE_LIMIT_PER_HOUR
|
||||
value: "1000"
|
||||
- name: SUPERMAVEN_ADMIN_API_KEY
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
alter table billing_subscriptions
|
||||
add column token_spend_in_cents integer,
|
||||
add column token_spend_in_cents_updated_at timestamp without time zone;
|
||||
@@ -1,8 +1,6 @@
|
||||
pub mod contributors;
|
||||
pub mod events;
|
||||
pub mod extensions;
|
||||
pub mod ips_file;
|
||||
pub mod slack;
|
||||
|
||||
use crate::{AppState, Error, Result, auth, db::UserId, rpc};
|
||||
use anyhow::Context as _;
|
||||
|
||||
@@ -1,33 +1,28 @@
|
||||
use super::ips_file::IpsFile;
|
||||
use crate::api::CloudflareIpCountryHeader;
|
||||
use crate::{AppState, Error, Result, api::slack};
|
||||
use crate::{AppState, Error, Result};
|
||||
use anyhow::anyhow;
|
||||
use aws_sdk_s3::primitives::ByteStream;
|
||||
use axum::{
|
||||
Extension, Router, TypedHeader,
|
||||
body::Bytes,
|
||||
headers::Header,
|
||||
http::{HeaderMap, HeaderName, StatusCode},
|
||||
http::{HeaderName, StatusCode},
|
||||
routing::post,
|
||||
};
|
||||
use chrono::Duration;
|
||||
use semantic_version::SemanticVersion;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::sync::{Arc, OnceLock};
|
||||
use telemetry_events::{Event, EventRequestBody, Panic};
|
||||
use telemetry_events::{Event, EventRequestBody};
|
||||
use util::ResultExt;
|
||||
use uuid::Uuid;
|
||||
|
||||
const CRASH_REPORTS_BUCKET: &str = "zed-crash-reports";
|
||||
|
||||
pub fn router() -> Router {
|
||||
Router::new()
|
||||
.route("/telemetry/events", post(post_events))
|
||||
.route("/telemetry/crashes", post(post_crash))
|
||||
.route("/telemetry/crashes", post(post_panic))
|
||||
.route("/telemetry/panics", post(post_panic))
|
||||
.route("/telemetry/hangs", post(post_hang))
|
||||
.route("/telemetry/hangs", post(post_panic))
|
||||
}
|
||||
|
||||
pub struct ZedChecksumHeader(Vec<u8>);
|
||||
@@ -58,437 +53,12 @@ impl Header for ZedChecksumHeader {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn post_crash(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
headers: HeaderMap,
|
||||
body: Bytes,
|
||||
) -> Result<()> {
|
||||
let report = IpsFile::parse(&body)?;
|
||||
let version_threshold = SemanticVersion::new(0, 123, 0);
|
||||
|
||||
let bundle_id = &report.header.bundle_id;
|
||||
let app_version = &report.app_version();
|
||||
|
||||
if bundle_id == "dev.zed.Zed-Dev" {
|
||||
log::error!("Crash uploads from {} are ignored.", bundle_id);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if app_version.is_none() || app_version.unwrap() < version_threshold {
|
||||
log::error!(
|
||||
"Crash uploads from {} are ignored.",
|
||||
report.header.app_version
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
let app_version = app_version.unwrap();
|
||||
|
||||
if let Some(blob_store_client) = app.blob_store_client.as_ref() {
|
||||
let response = blob_store_client
|
||||
.head_object()
|
||||
.bucket(CRASH_REPORTS_BUCKET)
|
||||
.key(report.header.incident_id.clone() + ".ips")
|
||||
.send()
|
||||
.await;
|
||||
|
||||
if response.is_ok() {
|
||||
log::info!("We've already uploaded this crash");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
blob_store_client
|
||||
.put_object()
|
||||
.bucket(CRASH_REPORTS_BUCKET)
|
||||
.key(report.header.incident_id.clone() + ".ips")
|
||||
.acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead)
|
||||
.body(ByteStream::from(body.to_vec()))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| log::error!("Failed to upload crash: {}", e))
|
||||
.ok();
|
||||
}
|
||||
|
||||
let recent_panic_on: Option<i64> = headers
|
||||
.get("x-zed-panicked-on")
|
||||
.and_then(|h| h.to_str().ok())
|
||||
.and_then(|s| s.parse().ok());
|
||||
|
||||
let installation_id = headers
|
||||
.get("x-zed-installation-id")
|
||||
.and_then(|h| h.to_str().ok())
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_default();
|
||||
|
||||
let mut recent_panic = None;
|
||||
|
||||
if let Some(recent_panic_on) = recent_panic_on {
|
||||
let crashed_at = match report.timestamp() {
|
||||
Ok(t) => Some(t),
|
||||
Err(e) => {
|
||||
log::error!("Can't parse {}: {}", report.header.timestamp, e);
|
||||
None
|
||||
}
|
||||
};
|
||||
if crashed_at.is_some_and(|t| (t.timestamp_millis() - recent_panic_on).abs() <= 30000) {
|
||||
recent_panic = headers.get("x-zed-panic").and_then(|h| h.to_str().ok());
|
||||
}
|
||||
}
|
||||
|
||||
let description = report.description(recent_panic);
|
||||
let summary = report.backtrace_summary();
|
||||
|
||||
tracing::error!(
|
||||
service = "client",
|
||||
version = %report.header.app_version,
|
||||
os_version = %report.header.os_version,
|
||||
bundle_id = %report.header.bundle_id,
|
||||
incident_id = %report.header.incident_id,
|
||||
installation_id = %installation_id,
|
||||
description = %description,
|
||||
backtrace = %summary,
|
||||
"crash report"
|
||||
);
|
||||
|
||||
if let Some(kinesis_client) = app.kinesis_client.clone()
|
||||
&& let Some(stream) = app.config.kinesis_stream.clone()
|
||||
{
|
||||
let properties = json!({
|
||||
"app_version": report.header.app_version,
|
||||
"os_version": report.header.os_version,
|
||||
"os_name": "macOS",
|
||||
"bundle_id": report.header.bundle_id,
|
||||
"incident_id": report.header.incident_id,
|
||||
"installation_id": installation_id,
|
||||
"description": description,
|
||||
"backtrace": summary,
|
||||
});
|
||||
let row = SnowflakeRow::new(
|
||||
"Crash Reported",
|
||||
None,
|
||||
false,
|
||||
Some(installation_id),
|
||||
properties,
|
||||
);
|
||||
let data = serde_json::to_vec(&row)?;
|
||||
kinesis_client
|
||||
.put_record()
|
||||
.stream_name(stream)
|
||||
.partition_key(row.insert_id.unwrap_or_default())
|
||||
.data(data.into())
|
||||
.send()
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
|
||||
if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() {
|
||||
let payload = slack::WebhookBody::new(|w| {
|
||||
w.add_section(|s| s.text(slack::Text::markdown(description)))
|
||||
.add_section(|s| {
|
||||
s.add_field(slack::Text::markdown(format!(
|
||||
"*Version:*\n{} ({})",
|
||||
bundle_id, app_version
|
||||
)))
|
||||
.add_field({
|
||||
let hostname = app.config.blob_store_url.clone().unwrap_or_default();
|
||||
let hostname = hostname.strip_prefix("https://").unwrap_or_else(|| {
|
||||
hostname.strip_prefix("http://").unwrap_or_default()
|
||||
});
|
||||
|
||||
slack::Text::markdown(format!(
|
||||
"*Incident:*\n<https://{}.{}/{}.ips|{}…>",
|
||||
CRASH_REPORTS_BUCKET,
|
||||
hostname,
|
||||
report.header.incident_id,
|
||||
report
|
||||
.header
|
||||
.incident_id
|
||||
.chars()
|
||||
.take(8)
|
||||
.collect::<String>(),
|
||||
))
|
||||
})
|
||||
})
|
||||
.add_rich_text(|r| r.add_preformatted(|p| p.add_text(summary)))
|
||||
});
|
||||
let payload_json = serde_json::to_string(&payload).map_err(|err| {
|
||||
log::error!("Failed to serialize payload to JSON: {err}");
|
||||
Error::Internal(anyhow!(err))
|
||||
})?;
|
||||
|
||||
reqwest::Client::new()
|
||||
.post(slack_panics_webhook)
|
||||
.header("Content-Type", "application/json")
|
||||
.body(payload_json)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|err| {
|
||||
log::error!("Failed to send payload to Slack: {err}");
|
||||
Error::Internal(anyhow!(err))
|
||||
})?;
|
||||
}
|
||||
|
||||
pub async fn post_panic() -> Result<()> {
|
||||
// as of v0.201.x crash/panic reporting is now done via Sentry.
|
||||
// The endpoint returns OK to avoid spurious errors for old clients.
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn post_hang(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
TypedHeader(ZedChecksumHeader(checksum)): TypedHeader<ZedChecksumHeader>,
|
||||
body: Bytes,
|
||||
) -> Result<()> {
|
||||
let Some(expected) = calculate_json_checksum(app.clone(), &body) else {
|
||||
return Err(Error::http(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"events not enabled".into(),
|
||||
))?;
|
||||
};
|
||||
|
||||
if checksum != expected {
|
||||
return Err(Error::http(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"invalid checksum".into(),
|
||||
))?;
|
||||
}
|
||||
|
||||
let incident_id = Uuid::new_v4().to_string();
|
||||
|
||||
// dump JSON into S3 so we can get frame offsets if we need to.
|
||||
if let Some(blob_store_client) = app.blob_store_client.as_ref() {
|
||||
blob_store_client
|
||||
.put_object()
|
||||
.bucket(CRASH_REPORTS_BUCKET)
|
||||
.key(incident_id.clone() + ".hang.json")
|
||||
.acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead)
|
||||
.body(ByteStream::from(body.to_vec()))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| log::error!("Failed to upload crash: {}", e))
|
||||
.ok();
|
||||
}
|
||||
|
||||
let report: telemetry_events::HangReport = serde_json::from_slice(&body).map_err(|err| {
|
||||
log::error!("can't parse report json: {err}");
|
||||
Error::Internal(anyhow!(err))
|
||||
})?;
|
||||
|
||||
let mut backtrace = "Possible hang detected on main thread:".to_string();
|
||||
let unknown = "<unknown>".to_string();
|
||||
for frame in report.backtrace.iter() {
|
||||
backtrace.push_str(&format!("\n{}", frame.symbols.first().unwrap_or(&unknown)));
|
||||
}
|
||||
|
||||
tracing::error!(
|
||||
service = "client",
|
||||
version = %report.app_version.unwrap_or_default().to_string(),
|
||||
os_name = %report.os_name,
|
||||
os_version = report.os_version.unwrap_or_default(),
|
||||
incident_id = %incident_id,
|
||||
installation_id = %report.installation_id.unwrap_or_default(),
|
||||
backtrace = %backtrace,
|
||||
"hang report");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn post_panic(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
TypedHeader(ZedChecksumHeader(checksum)): TypedHeader<ZedChecksumHeader>,
|
||||
body: Bytes,
|
||||
) -> Result<()> {
|
||||
let Some(expected) = calculate_json_checksum(app.clone(), &body) else {
|
||||
return Err(Error::http(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"events not enabled".into(),
|
||||
))?;
|
||||
};
|
||||
|
||||
if checksum != expected {
|
||||
return Err(Error::http(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"invalid checksum".into(),
|
||||
))?;
|
||||
}
|
||||
|
||||
let report: telemetry_events::PanicRequest = serde_json::from_slice(&body)
|
||||
.map_err(|_| Error::http(StatusCode::BAD_REQUEST, "invalid json".into()))?;
|
||||
let incident_id = uuid::Uuid::new_v4().to_string();
|
||||
let panic = report.panic;
|
||||
|
||||
if panic.os_name == "Linux" && panic.os_version == Some("1.0.0".to_string()) {
|
||||
return Err(Error::http(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"invalid os version".into(),
|
||||
))?;
|
||||
}
|
||||
|
||||
if let Some(blob_store_client) = app.blob_store_client.as_ref() {
|
||||
let response = blob_store_client
|
||||
.head_object()
|
||||
.bucket(CRASH_REPORTS_BUCKET)
|
||||
.key(incident_id.clone() + ".json")
|
||||
.send()
|
||||
.await;
|
||||
|
||||
if response.is_ok() {
|
||||
log::info!("We've already uploaded this crash");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
blob_store_client
|
||||
.put_object()
|
||||
.bucket(CRASH_REPORTS_BUCKET)
|
||||
.key(incident_id.clone() + ".json")
|
||||
.acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead)
|
||||
.body(ByteStream::from(body.to_vec()))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| log::error!("Failed to upload crash: {}", e))
|
||||
.ok();
|
||||
}
|
||||
|
||||
let backtrace = panic.backtrace.join("\n");
|
||||
|
||||
tracing::error!(
|
||||
service = "client",
|
||||
version = %panic.app_version,
|
||||
os_name = %panic.os_name,
|
||||
os_version = %panic.os_version.clone().unwrap_or_default(),
|
||||
incident_id = %incident_id,
|
||||
installation_id = %panic.installation_id.clone().unwrap_or_default(),
|
||||
description = %panic.payload,
|
||||
backtrace = %backtrace,
|
||||
"panic report"
|
||||
);
|
||||
|
||||
if let Some(kinesis_client) = app.kinesis_client.clone()
|
||||
&& let Some(stream) = app.config.kinesis_stream.clone()
|
||||
{
|
||||
let properties = json!({
|
||||
"app_version": panic.app_version,
|
||||
"os_name": panic.os_name,
|
||||
"os_version": panic.os_version,
|
||||
"incident_id": incident_id,
|
||||
"installation_id": panic.installation_id,
|
||||
"description": panic.payload,
|
||||
"backtrace": backtrace,
|
||||
});
|
||||
let row = SnowflakeRow::new(
|
||||
"Panic Reported",
|
||||
None,
|
||||
false,
|
||||
panic.installation_id.clone(),
|
||||
properties,
|
||||
);
|
||||
let data = serde_json::to_vec(&row)?;
|
||||
kinesis_client
|
||||
.put_record()
|
||||
.stream_name(stream)
|
||||
.partition_key(row.insert_id.unwrap_or_default())
|
||||
.data(data.into())
|
||||
.send()
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
|
||||
if !report_to_slack(&panic) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() {
|
||||
let backtrace = if panic.backtrace.len() > 25 {
|
||||
let total = panic.backtrace.len();
|
||||
format!(
|
||||
"{}\n and {} more",
|
||||
panic
|
||||
.backtrace
|
||||
.iter()
|
||||
.take(20)
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n"),
|
||||
total - 20
|
||||
)
|
||||
} else {
|
||||
panic.backtrace.join("\n")
|
||||
};
|
||||
let backtrace_with_summary = panic.payload + "\n" + &backtrace;
|
||||
|
||||
let version = if panic.release_channel == "nightly"
|
||||
&& !panic.app_version.contains("remote-server")
|
||||
&& let Some(sha) = panic.app_commit_sha
|
||||
{
|
||||
format!("Zed Nightly {}", sha.chars().take(7).collect::<String>())
|
||||
} else {
|
||||
panic.app_version
|
||||
};
|
||||
|
||||
let payload = slack::WebhookBody::new(|w| {
|
||||
w.add_section(|s| s.text(slack::Text::markdown("Panic request".to_string())))
|
||||
.add_section(|s| {
|
||||
s.add_field(slack::Text::markdown(format!("*Version:*\n {version} ",)))
|
||||
.add_field({
|
||||
let hostname = app.config.blob_store_url.clone().unwrap_or_default();
|
||||
let hostname = hostname.strip_prefix("https://").unwrap_or_else(|| {
|
||||
hostname.strip_prefix("http://").unwrap_or_default()
|
||||
});
|
||||
|
||||
slack::Text::markdown(format!(
|
||||
"*{} {}:*\n<https://{}.{}/{}.json|{}…>",
|
||||
panic.os_name,
|
||||
panic.os_version.unwrap_or_default(),
|
||||
CRASH_REPORTS_BUCKET,
|
||||
hostname,
|
||||
incident_id,
|
||||
incident_id.chars().take(8).collect::<String>(),
|
||||
))
|
||||
})
|
||||
})
|
||||
.add_rich_text(|r| r.add_preformatted(|p| p.add_text(backtrace_with_summary)))
|
||||
});
|
||||
let payload_json = serde_json::to_string(&payload).map_err(|err| {
|
||||
log::error!("Failed to serialize payload to JSON: {err}");
|
||||
Error::Internal(anyhow!(err))
|
||||
})?;
|
||||
|
||||
reqwest::Client::new()
|
||||
.post(slack_panics_webhook)
|
||||
.header("Content-Type", "application/json")
|
||||
.body(payload_json)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|err| {
|
||||
log::error!("Failed to send payload to Slack: {err}");
|
||||
Error::Internal(anyhow!(err))
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn report_to_slack(panic: &Panic) -> bool {
|
||||
// Panics on macOS should make their way to Slack as a crash report,
|
||||
// so we don't need to send them a second time via this channel.
|
||||
if panic.os_name == "macOS" {
|
||||
return false;
|
||||
}
|
||||
|
||||
if panic.payload.contains("ERROR_SURFACE_LOST_KHR") {
|
||||
return false;
|
||||
}
|
||||
|
||||
if panic.payload.contains("ERROR_INITIALIZATION_FAILED") {
|
||||
return false;
|
||||
}
|
||||
|
||||
if panic
|
||||
.payload
|
||||
.contains("GPU has crashed, and no debug information is available")
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
pub async fn post_events(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
TypedHeader(ZedChecksumHeader(checksum)): TypedHeader<ZedChecksumHeader>,
|
||||
|
||||
@@ -1,346 +0,0 @@
|
||||
use anyhow::Context as _;
|
||||
use collections::HashMap;
|
||||
|
||||
use semantic_version::SemanticVersion;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct IpsFile {
|
||||
pub header: Header,
|
||||
pub body: Body,
|
||||
}
|
||||
|
||||
impl IpsFile {
|
||||
pub fn parse(bytes: &[u8]) -> anyhow::Result<IpsFile> {
|
||||
let mut split = bytes.splitn(2, |&b| b == b'\n');
|
||||
let header_bytes = split.next().context("No header found")?;
|
||||
let header: Header = serde_json::from_slice(header_bytes).context("parsing header")?;
|
||||
|
||||
let body_bytes = split.next().context("No body found")?;
|
||||
|
||||
let body: Body = serde_json::from_slice(body_bytes).context("parsing body")?;
|
||||
Ok(IpsFile { header, body })
|
||||
}
|
||||
|
||||
pub fn faulting_thread(&self) -> Option<&Thread> {
|
||||
self.body.threads.get(self.body.faulting_thread? as usize)
|
||||
}
|
||||
|
||||
pub fn app_version(&self) -> Option<SemanticVersion> {
|
||||
self.header.app_version.parse().ok()
|
||||
}
|
||||
|
||||
pub fn timestamp(&self) -> anyhow::Result<chrono::DateTime<chrono::FixedOffset>> {
|
||||
chrono::DateTime::parse_from_str(&self.header.timestamp, "%Y-%m-%d %H:%M:%S%.f %#z")
|
||||
.map_err(|e| anyhow::anyhow!(e))
|
||||
}
|
||||
|
||||
pub fn description(&self, panic: Option<&str>) -> String {
|
||||
let mut desc = if self.body.termination.indicator == "Abort trap: 6" {
|
||||
match panic {
|
||||
Some(panic_message) => format!("Panic `{}`", panic_message),
|
||||
None => "Crash `Abort trap: 6` (possible panic)".into(),
|
||||
}
|
||||
} else if let Some(msg) = &self.body.exception.message {
|
||||
format!("Exception `{}`", msg)
|
||||
} else {
|
||||
format!("Crash `{}`", self.body.termination.indicator)
|
||||
};
|
||||
if let Some(thread) = self.faulting_thread() {
|
||||
if let Some(queue) = thread.queue.as_ref() {
|
||||
desc += &format!(
|
||||
" on thread {} ({})",
|
||||
self.body.faulting_thread.unwrap_or_default(),
|
||||
queue
|
||||
);
|
||||
} else {
|
||||
desc += &format!(
|
||||
" on thread {} ({})",
|
||||
self.body.faulting_thread.unwrap_or_default(),
|
||||
thread.name.clone().unwrap_or_default()
|
||||
);
|
||||
}
|
||||
}
|
||||
desc
|
||||
}
|
||||
|
||||
pub fn backtrace_summary(&self) -> String {
|
||||
if let Some(thread) = self.faulting_thread() {
|
||||
let mut frames = thread
|
||||
.frames
|
||||
.iter()
|
||||
.filter_map(|frame| {
|
||||
if let Some(name) = &frame.symbol {
|
||||
if self.is_ignorable_frame(name) {
|
||||
return None;
|
||||
}
|
||||
Some(format!("{:#}", rustc_demangle::demangle(name)))
|
||||
} else if let Some(image) = self.body.used_images.get(frame.image_index) {
|
||||
Some(image.name.clone().unwrap_or("<unknown-image>".into()))
|
||||
} else {
|
||||
Some("<unknown>".into())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let total = frames.len();
|
||||
if total > 21 {
|
||||
frames = frames.into_iter().take(20).collect();
|
||||
frames.push(format!(" and {} more...", total - 20))
|
||||
}
|
||||
frames.join("\n")
|
||||
} else {
|
||||
"<no backtrace available>".into()
|
||||
}
|
||||
}
|
||||
|
||||
fn is_ignorable_frame(&self, symbol: &String) -> bool {
|
||||
[
|
||||
"pthread_kill",
|
||||
"panic",
|
||||
"backtrace",
|
||||
"rust_begin_unwind",
|
||||
"abort",
|
||||
]
|
||||
.iter()
|
||||
.any(|s| symbol.contains(s))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(default)]
|
||||
pub struct Header {
|
||||
pub app_name: String,
|
||||
pub timestamp: String,
|
||||
pub app_version: String,
|
||||
pub slice_uuid: String,
|
||||
pub build_version: String,
|
||||
pub platform: i64,
|
||||
#[serde(rename = "bundleID", default)]
|
||||
pub bundle_id: String,
|
||||
pub share_with_app_devs: i64,
|
||||
pub is_first_party: i64,
|
||||
pub bug_type: String,
|
||||
pub os_version: String,
|
||||
pub roots_installed: i64,
|
||||
pub name: String,
|
||||
pub incident_id: String,
|
||||
}
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Body {
|
||||
pub uptime: i64,
|
||||
pub proc_role: String,
|
||||
pub version: i64,
|
||||
#[serde(rename = "userID")]
|
||||
pub user_id: i64,
|
||||
pub deploy_version: i64,
|
||||
pub model_code: String,
|
||||
#[serde(rename = "coalitionID")]
|
||||
pub coalition_id: i64,
|
||||
pub os_version: OsVersion,
|
||||
pub capture_time: String,
|
||||
pub code_signing_monitor: i64,
|
||||
pub incident: String,
|
||||
pub pid: i64,
|
||||
pub translated: bool,
|
||||
pub cpu_type: String,
|
||||
#[serde(rename = "roots_installed")]
|
||||
pub roots_installed: i64,
|
||||
#[serde(rename = "bug_type")]
|
||||
pub bug_type: String,
|
||||
pub proc_launch: String,
|
||||
pub proc_start_abs_time: i64,
|
||||
pub proc_exit_abs_time: i64,
|
||||
pub proc_name: String,
|
||||
pub proc_path: String,
|
||||
pub bundle_info: BundleInfo,
|
||||
pub store_info: StoreInfo,
|
||||
pub parent_proc: String,
|
||||
pub parent_pid: i64,
|
||||
pub coalition_name: String,
|
||||
pub crash_reporter_key: String,
|
||||
#[serde(rename = "codeSigningID")]
|
||||
pub code_signing_id: String,
|
||||
#[serde(rename = "codeSigningTeamID")]
|
||||
pub code_signing_team_id: String,
|
||||
pub code_signing_flags: i64,
|
||||
pub code_signing_validation_category: i64,
|
||||
pub code_signing_trust_level: i64,
|
||||
pub instruction_byte_stream: InstructionByteStream,
|
||||
pub sip: String,
|
||||
pub exception: Exception,
|
||||
pub termination: Termination,
|
||||
pub asi: Asi,
|
||||
pub ext_mods: ExtMods,
|
||||
pub faulting_thread: Option<i64>,
|
||||
pub threads: Vec<Thread>,
|
||||
pub used_images: Vec<UsedImage>,
|
||||
pub shared_cache: SharedCache,
|
||||
pub vm_summary: String,
|
||||
pub legacy_info: LegacyInfo,
|
||||
pub log_writing_signature: String,
|
||||
pub trial_info: TrialInfo,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct OsVersion {
|
||||
pub train: String,
|
||||
pub build: String,
|
||||
pub release_type: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct BundleInfo {
|
||||
#[serde(rename = "CFBundleShortVersionString")]
|
||||
pub cfbundle_short_version_string: String,
|
||||
#[serde(rename = "CFBundleVersion")]
|
||||
pub cfbundle_version: String,
|
||||
#[serde(rename = "CFBundleIdentifier")]
|
||||
pub cfbundle_identifier: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct StoreInfo {
|
||||
pub device_identifier_for_vendor: String,
|
||||
pub third_party: bool,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct InstructionByteStream {
|
||||
#[serde(rename = "beforePC")]
|
||||
pub before_pc: String,
|
||||
#[serde(rename = "atPC")]
|
||||
pub at_pc: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Exception {
|
||||
pub codes: String,
|
||||
pub raw_codes: Vec<i64>,
|
||||
#[serde(rename = "type")]
|
||||
pub type_field: String,
|
||||
pub subtype: Option<String>,
|
||||
pub signal: String,
|
||||
pub port: Option<i64>,
|
||||
pub guard_id: Option<i64>,
|
||||
pub message: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Termination {
|
||||
pub flags: i64,
|
||||
pub code: i64,
|
||||
pub namespace: String,
|
||||
pub indicator: String,
|
||||
pub by_proc: String,
|
||||
pub by_pid: i64,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Asi {
|
||||
#[serde(rename = "libsystem_c.dylib")]
|
||||
pub libsystem_c_dylib: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct ExtMods {
|
||||
pub caller: ExtMod,
|
||||
pub system: ExtMod,
|
||||
pub targeted: ExtMod,
|
||||
pub warnings: i64,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct ExtMod {
|
||||
#[serde(rename = "thread_create")]
|
||||
pub thread_create: i64,
|
||||
#[serde(rename = "thread_set_state")]
|
||||
pub thread_set_state: i64,
|
||||
#[serde(rename = "task_for_pid")]
|
||||
pub task_for_pid: i64,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Thread {
|
||||
pub thread_state: HashMap<String, Value>,
|
||||
pub id: i64,
|
||||
pub triggered: Option<bool>,
|
||||
pub name: Option<String>,
|
||||
pub queue: Option<String>,
|
||||
pub frames: Vec<Frame>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Frame {
|
||||
pub image_offset: i64,
|
||||
pub symbol: Option<String>,
|
||||
pub symbol_location: Option<i64>,
|
||||
pub image_index: usize,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct UsedImage {
|
||||
pub source: String,
|
||||
pub arch: Option<String>,
|
||||
pub base: i64,
|
||||
#[serde(rename = "CFBundleShortVersionString")]
|
||||
pub cfbundle_short_version_string: Option<String>,
|
||||
#[serde(rename = "CFBundleIdentifier")]
|
||||
pub cfbundle_identifier: Option<String>,
|
||||
pub size: i64,
|
||||
pub uuid: String,
|
||||
pub path: Option<String>,
|
||||
pub name: Option<String>,
|
||||
#[serde(rename = "CFBundleVersion")]
|
||||
pub cfbundle_version: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct SharedCache {
|
||||
pub base: i64,
|
||||
pub size: i64,
|
||||
pub uuid: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct LegacyInfo {
|
||||
pub thread_triggered: ThreadTriggered,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct ThreadTriggered {
|
||||
pub name: String,
|
||||
pub queue: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct TrialInfo {
|
||||
pub rollouts: Vec<Rollout>,
|
||||
pub experiments: Vec<Value>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Rollout {
|
||||
pub rollout_id: String,
|
||||
pub factor_pack_ids: HashMap<String, Value>,
|
||||
pub deployment_id: i64,
|
||||
}
|
||||
@@ -1,144 +0,0 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// https://api.slack.com/reference/messaging/payload
|
||||
#[derive(Default, Clone, Serialize, Deserialize)]
|
||||
pub struct WebhookBody {
|
||||
text: String,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
blocks: Vec<Block>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
thread_ts: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
mrkdwn: Option<bool>,
|
||||
}
|
||||
|
||||
impl WebhookBody {
|
||||
pub fn new(f: impl FnOnce(Self) -> Self) -> Self {
|
||||
f(Self::default())
|
||||
}
|
||||
|
||||
pub fn add_section(mut self, build: impl FnOnce(Section) -> Section) -> Self {
|
||||
self.blocks.push(Block::Section(build(Section::default())));
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_rich_text(mut self, build: impl FnOnce(RichText) -> RichText) -> Self {
|
||||
self.blocks
|
||||
.push(Block::RichText(build(RichText::default())));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
/// https://api.slack.com/reference/block-kit/blocks
|
||||
pub enum Block {
|
||||
#[serde(rename = "section")]
|
||||
Section(Section),
|
||||
#[serde(rename = "rich_text")]
|
||||
RichText(RichText),
|
||||
// .... etc.
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/blocks#section
|
||||
#[derive(Default, Clone, Serialize, Deserialize)]
|
||||
pub struct Section {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
text: Option<Text>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
fields: Vec<Text>,
|
||||
// fields, accessories...
|
||||
}
|
||||
|
||||
impl Section {
|
||||
pub fn text(mut self, text: Text) -> Self {
|
||||
self.text = Some(text);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_field(mut self, field: Text) -> Self {
|
||||
self.fields.push(field);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/composition-objects#text
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum Text {
|
||||
#[serde(rename = "plain_text")]
|
||||
PlainText { text: String, emoji: bool },
|
||||
#[serde(rename = "mrkdwn")]
|
||||
Markdown { text: String, verbatim: bool },
|
||||
}
|
||||
|
||||
impl Text {
|
||||
pub fn plain(s: String) -> Self {
|
||||
Self::PlainText {
|
||||
text: s,
|
||||
emoji: true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn markdown(s: String) -> Self {
|
||||
Self::Markdown {
|
||||
text: s,
|
||||
verbatim: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Serialize, Deserialize)]
|
||||
pub struct RichText {
|
||||
elements: Vec<RichTextObject>,
|
||||
}
|
||||
|
||||
impl RichText {
|
||||
pub fn new(f: impl FnOnce(Self) -> Self) -> Self {
|
||||
f(Self::default())
|
||||
}
|
||||
|
||||
pub fn add_preformatted(
|
||||
mut self,
|
||||
build: impl FnOnce(RichTextPreformatted) -> RichTextPreformatted,
|
||||
) -> Self {
|
||||
self.elements.push(RichTextObject::Preformatted(build(
|
||||
RichTextPreformatted::default(),
|
||||
)));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/blocks#rich_text
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum RichTextObject {
|
||||
#[serde(rename = "rich_text_preformatted")]
|
||||
Preformatted(RichTextPreformatted),
|
||||
// etc.
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/blocks#rich_text_preformatted
|
||||
#[derive(Clone, Default, Serialize, Deserialize)]
|
||||
pub struct RichTextPreformatted {
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
elements: Vec<RichTextElement>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
border: Option<u8>,
|
||||
}
|
||||
|
||||
impl RichTextPreformatted {
|
||||
pub fn add_text(mut self, text: String) -> Self {
|
||||
self.elements.push(RichTextElement::Text { text });
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/blocks#element-types
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum RichTextElement {
|
||||
#[serde(rename = "text")]
|
||||
Text { text: String },
|
||||
// etc.
|
||||
}
|
||||
@@ -153,7 +153,6 @@ pub struct Config {
|
||||
pub prediction_api_key: Option<Arc<str>>,
|
||||
pub prediction_model: Option<Arc<str>>,
|
||||
pub zed_client_checksum_seed: Option<String>,
|
||||
pub slack_panics_webhook: Option<String>,
|
||||
pub auto_join_channel_id: Option<ChannelId>,
|
||||
pub supermaven_admin_api_key: Option<Arc<str>>,
|
||||
}
|
||||
@@ -204,7 +203,6 @@ impl Config {
|
||||
prediction_api_key: None,
|
||||
prediction_model: None,
|
||||
zed_client_checksum_seed: None,
|
||||
slack_panics_webhook: None,
|
||||
auto_join_channel_id: None,
|
||||
migrations_path: None,
|
||||
seed_path: None,
|
||||
|
||||
@@ -84,7 +84,11 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
|
||||
diff.update(cx_b, |diff, cx| {
|
||||
assert_eq!(
|
||||
diff.excerpt_paths(cx),
|
||||
vec!["changed.txt", "deleted.txt", "created.txt"]
|
||||
vec![
|
||||
rel_path("changed.txt").into_arc(),
|
||||
rel_path("deleted.txt").into_arc(),
|
||||
rel_path("created.txt").into_arc()
|
||||
]
|
||||
);
|
||||
});
|
||||
|
||||
@@ -121,7 +125,11 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
|
||||
diff.update(cx_b, |diff, cx| {
|
||||
assert_eq!(
|
||||
diff.excerpt_paths(cx),
|
||||
vec!["deleted.txt", "unchanged.txt", "created.txt"]
|
||||
vec![
|
||||
rel_path("deleted.txt").into_arc(),
|
||||
rel_path("unchanged.txt").into_arc(),
|
||||
rel_path("created.txt").into_arc()
|
||||
]
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -6514,14 +6514,8 @@ async fn test_right_click_menu_behind_collab_panel(cx: &mut TestAppContext) {
|
||||
cx.simulate_keystrokes("cmd-n cmd-n cmd-n");
|
||||
cx.update(|window, _cx| window.refresh());
|
||||
|
||||
let tab_bounds = cx.debug_bounds("TAB-2").unwrap();
|
||||
let new_tab_button_bounds = cx.debug_bounds("ICON-Plus").unwrap();
|
||||
|
||||
assert!(
|
||||
tab_bounds.intersects(&new_tab_button_bounds),
|
||||
"Tab should overlap with the new tab button, if this is failing check if there's been a redesign!"
|
||||
);
|
||||
|
||||
cx.simulate_event(MouseDownEvent {
|
||||
button: MouseButton::Right,
|
||||
position: new_tab_button_bounds.center(),
|
||||
|
||||
@@ -599,7 +599,6 @@ impl TestServer {
|
||||
prediction_api_key: None,
|
||||
prediction_model: None,
|
||||
zed_client_checksum_seed: None,
|
||||
slack_panics_webhook: None,
|
||||
auto_join_channel_id: None,
|
||||
migrations_path: None,
|
||||
seed_path: None,
|
||||
|
||||
@@ -922,7 +922,7 @@ impl CollabPanel {
|
||||
|
||||
ListItem::new(user.github_login.clone())
|
||||
.start_slot(Avatar::new(user.avatar_uri.clone()))
|
||||
.child(Label::new(user.github_login.clone()))
|
||||
.child(render_participant_name_and_handle(user))
|
||||
.toggle_state(is_selected)
|
||||
.end_slot(if is_pending {
|
||||
Label::new("Calling").color(Color::Muted).into_any_element()
|
||||
@@ -2505,7 +2505,7 @@ impl CollabPanel {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(Label::new(github_login.clone()))
|
||||
.child(render_participant_name_and_handle(&contact.user))
|
||||
.when(calling, |el| {
|
||||
el.child(Label::new("Calling").color(Color::Muted))
|
||||
})
|
||||
@@ -2940,6 +2940,14 @@ fn render_tree_branch(
|
||||
.h(line_height)
|
||||
}
|
||||
|
||||
fn render_participant_name_and_handle(user: &User) -> impl IntoElement {
|
||||
Label::new(if let Some(ref display_name) = user.name {
|
||||
format!("{display_name} ({})", user.github_login)
|
||||
} else {
|
||||
user.github_login.to_string()
|
||||
})
|
||||
}
|
||||
|
||||
impl Render for CollabPanel {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
v_flex()
|
||||
@@ -3170,8 +3178,8 @@ struct JoinChannelTooltip {
|
||||
}
|
||||
|
||||
impl Render for JoinChannelTooltip {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
tooltip_container(window, cx, |container, _, cx| {
|
||||
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
tooltip_container(cx, |container, cx| {
|
||||
let participants = self
|
||||
.channel_store
|
||||
.read(cx)
|
||||
@@ -3183,7 +3191,7 @@ impl Render for JoinChannelTooltip {
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(Avatar::new(participant.avatar_uri.clone()))
|
||||
.child(Label::new(participant.github_login.clone()))
|
||||
.child(render_participant_name_and_handle(participant))
|
||||
}))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -40,7 +40,7 @@ impl DebuggerOnboardingModal {
|
||||
}
|
||||
|
||||
fn view_blog(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
|
||||
cx.open_url("http://zed.dev/blog/debugger");
|
||||
cx.open_url("https://zed.dev/blog/debugger");
|
||||
cx.notify();
|
||||
|
||||
debugger_onboarding_event!("Blog Link Clicked");
|
||||
|
||||
@@ -41,8 +41,8 @@ use serde_json::Value;
|
||||
use settings::Settings;
|
||||
use stack_frame_list::StackFrameList;
|
||||
use task::{
|
||||
BuildTaskDefinition, DebugScenario, ShellBuilder, SpawnInTerminal, TaskContext, ZedDebugConfig,
|
||||
substitute_variables_in_str,
|
||||
BuildTaskDefinition, DebugScenario, Shell, ShellBuilder, SpawnInTerminal, TaskContext,
|
||||
ZedDebugConfig, substitute_variables_in_str,
|
||||
};
|
||||
use terminal_view::TerminalView;
|
||||
use ui::{
|
||||
@@ -988,7 +988,7 @@ impl RunningState {
|
||||
(task, None)
|
||||
}
|
||||
};
|
||||
let Some(task) = task_template.resolve_task("debug-build-task", &task_context) else {
|
||||
let Some(mut task) = task_template.resolve_task("debug-build-task", &task_context) else {
|
||||
anyhow::bail!("Could not resolve task variables within a debug scenario");
|
||||
};
|
||||
|
||||
@@ -1025,7 +1025,11 @@ impl RunningState {
|
||||
None
|
||||
};
|
||||
|
||||
let builder = ShellBuilder::new(remote_shell.as_deref(), &task.resolved.shell);
|
||||
if let Some(remote_shell) = remote_shell && task.resolved.shell == Shell::System {
|
||||
task.resolved.shell = Shell::Program(remote_shell);
|
||||
}
|
||||
|
||||
let builder = ShellBuilder::new(&task.resolved.shell);
|
||||
let command_label = builder.command_label(task.resolved.command.as_deref().unwrap_or(""));
|
||||
let (command, args) =
|
||||
builder.build(task.resolved.command.clone(), &task.resolved.args);
|
||||
@@ -1228,7 +1232,6 @@ impl RunningState {
|
||||
|
||||
terminal.read_with(cx, |terminal, _| {
|
||||
terminal
|
||||
.pty_info
|
||||
.pid()
|
||||
.map(|pid| pid.as_u32())
|
||||
.context("Terminal was spawned but PID was not available")
|
||||
|
||||
@@ -9,7 +9,10 @@ use gpui::{
|
||||
Action, AnyElement, Entity, EventEmitter, FocusHandle, Focusable, FontWeight, ListState,
|
||||
Subscription, Task, WeakEntity, list,
|
||||
};
|
||||
use util::debug_panic;
|
||||
use util::{
|
||||
debug_panic,
|
||||
paths::{PathStyle, is_absolute},
|
||||
};
|
||||
|
||||
use crate::{StackTraceView, ToggleUserFrames};
|
||||
use language::PointUtf16;
|
||||
@@ -470,8 +473,12 @@ impl StackFrameList {
|
||||
stack_frame.source.as_ref().and_then(|s| {
|
||||
s.path
|
||||
.as_deref()
|
||||
.filter(|path| {
|
||||
// Since we do not know if we are debugging on the host or (a remote/WSL) target,
|
||||
// we need to check if either the path is absolute as Posix or Windows.
|
||||
is_absolute(path, PathStyle::Posix) || is_absolute(path, PathStyle::Windows)
|
||||
})
|
||||
.map(|path| Arc::<Path>::from(Path::new(path)))
|
||||
.filter(|path| path.is_absolute())
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -1213,7 +1213,7 @@ impl VariableList {
|
||||
|
||||
let weak = cx.weak_entity();
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
let watcher_len = (self.list_handle.content_size().width.0 / 12.0).floor() - 3.0;
|
||||
let watcher_len = (f32::from(self.list_handle.content_size().width / 12.0).floor()) - 3.0;
|
||||
let watcher_len = watcher_len as usize;
|
||||
|
||||
div()
|
||||
|
||||
@@ -15,7 +15,7 @@ use gpui::{
|
||||
InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, Subscription,
|
||||
Task, WeakEntity, Window, actions, div,
|
||||
};
|
||||
use language::{Buffer, DiagnosticEntry, Point};
|
||||
use language::{Buffer, DiagnosticEntry, DiagnosticEntryRef, Point};
|
||||
use project::{
|
||||
DiagnosticSummary, Event, Project, ProjectItem, ProjectPath,
|
||||
project_settings::{DiagnosticSeverity, ProjectSettings},
|
||||
@@ -350,7 +350,7 @@ impl BufferDiagnosticsEditor {
|
||||
grouped
|
||||
.entry(entry.diagnostic.group_id)
|
||||
.or_default()
|
||||
.push(DiagnosticEntry {
|
||||
.push(DiagnosticEntryRef {
|
||||
range: entry.range.to_point(&buffer_snapshot),
|
||||
diagnostic: entry.diagnostic,
|
||||
})
|
||||
@@ -560,13 +560,16 @@ impl BufferDiagnosticsEditor {
|
||||
})
|
||||
}
|
||||
|
||||
fn set_diagnostics(&mut self, diagnostics: &Vec<DiagnosticEntry<Anchor>>) {
|
||||
self.diagnostics = diagnostics.clone();
|
||||
fn set_diagnostics(&mut self, diagnostics: &[DiagnosticEntryRef<'_, Anchor>]) {
|
||||
self.diagnostics = diagnostics
|
||||
.iter()
|
||||
.map(DiagnosticEntryRef::to_owned)
|
||||
.collect();
|
||||
}
|
||||
|
||||
fn diagnostics_are_unchanged(
|
||||
&self,
|
||||
diagnostics: &Vec<DiagnosticEntry<Anchor>>,
|
||||
diagnostics: &Vec<DiagnosticEntryRef<'_, Anchor>>,
|
||||
snapshot: &BufferSnapshot,
|
||||
) -> bool {
|
||||
if self.diagnostics.len() != diagnostics.len() {
|
||||
|
||||
@@ -6,7 +6,7 @@ use editor::{
|
||||
hover_popover::diagnostics_markdown_style,
|
||||
};
|
||||
use gpui::{AppContext, Entity, Focusable, WeakEntity};
|
||||
use language::{BufferId, Diagnostic, DiagnosticEntry};
|
||||
use language::{BufferId, Diagnostic, DiagnosticEntryRef};
|
||||
use lsp::DiagnosticSeverity;
|
||||
use markdown::{Markdown, MarkdownElement};
|
||||
use settings::Settings;
|
||||
@@ -24,7 +24,7 @@ pub struct DiagnosticRenderer;
|
||||
|
||||
impl DiagnosticRenderer {
|
||||
pub fn diagnostic_blocks_for_group(
|
||||
diagnostic_group: Vec<DiagnosticEntry<Point>>,
|
||||
diagnostic_group: Vec<DiagnosticEntryRef<'_, Point>>,
|
||||
buffer_id: BufferId,
|
||||
diagnostics_editor: Option<Arc<dyn DiagnosticsToolbarEditor>>,
|
||||
cx: &mut App,
|
||||
@@ -35,7 +35,7 @@ impl DiagnosticRenderer {
|
||||
else {
|
||||
return Vec::new();
|
||||
};
|
||||
let primary = diagnostic_group[primary_ix].clone();
|
||||
let primary = &diagnostic_group[primary_ix];
|
||||
let group_id = primary.diagnostic.group_id;
|
||||
let mut results = vec![];
|
||||
for entry in diagnostic_group.iter() {
|
||||
@@ -123,7 +123,7 @@ impl DiagnosticRenderer {
|
||||
impl editor::DiagnosticRenderer for DiagnosticRenderer {
|
||||
fn render_group(
|
||||
&self,
|
||||
diagnostic_group: Vec<DiagnosticEntry<Point>>,
|
||||
diagnostic_group: Vec<DiagnosticEntryRef<'_, Point>>,
|
||||
buffer_id: BufferId,
|
||||
snapshot: EditorSnapshot,
|
||||
editor: WeakEntity<Editor>,
|
||||
@@ -152,19 +152,15 @@ impl editor::DiagnosticRenderer for DiagnosticRenderer {
|
||||
|
||||
fn render_hover(
|
||||
&self,
|
||||
diagnostic_group: Vec<DiagnosticEntry<Point>>,
|
||||
diagnostic_group: Vec<DiagnosticEntryRef<'_, Point>>,
|
||||
range: Range<Point>,
|
||||
buffer_id: BufferId,
|
||||
cx: &mut App,
|
||||
) -> Option<Entity<Markdown>> {
|
||||
let blocks = Self::diagnostic_blocks_for_group(diagnostic_group, buffer_id, None, cx);
|
||||
blocks.into_iter().find_map(|block| {
|
||||
if block.initial_range == range {
|
||||
Some(block.markdown)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
blocks
|
||||
.into_iter()
|
||||
.find_map(|block| (block.initial_range == range).then(|| block.markdown))
|
||||
}
|
||||
|
||||
fn open_link(
|
||||
@@ -189,7 +185,7 @@ pub(crate) struct DiagnosticBlock {
|
||||
impl DiagnosticBlock {
|
||||
pub fn render_block(&self, editor: WeakEntity<Editor>, bcx: &BlockContext) -> AnyElement {
|
||||
let cx = &bcx.app;
|
||||
let status_colors = bcx.app.theme().status();
|
||||
let status_colors = cx.theme().status();
|
||||
|
||||
let max_width = bcx.em_width * 120.;
|
||||
|
||||
|
||||
@@ -22,7 +22,8 @@ use gpui::{
|
||||
Subscription, Task, WeakEntity, Window, actions, div,
|
||||
};
|
||||
use language::{
|
||||
Bias, Buffer, BufferRow, BufferSnapshot, DiagnosticEntry, Point, ToTreeSitterPoint,
|
||||
Bias, Buffer, BufferRow, BufferSnapshot, DiagnosticEntry, DiagnosticEntryRef, Point,
|
||||
ToTreeSitterPoint,
|
||||
};
|
||||
use project::{
|
||||
DiagnosticSummary, Project, ProjectPath,
|
||||
@@ -412,8 +413,8 @@ impl ProjectDiagnosticsEditor {
|
||||
|
||||
fn diagnostics_are_unchanged(
|
||||
&self,
|
||||
existing: &Vec<DiagnosticEntry<text::Anchor>>,
|
||||
new: &Vec<DiagnosticEntry<text::Anchor>>,
|
||||
existing: &[DiagnosticEntry<text::Anchor>],
|
||||
new: &[DiagnosticEntryRef<'_, text::Anchor>],
|
||||
snapshot: &BufferSnapshot,
|
||||
) -> bool {
|
||||
if existing.len() != new.len() {
|
||||
@@ -457,7 +458,13 @@ impl ProjectDiagnosticsEditor {
|
||||
}) {
|
||||
return true;
|
||||
}
|
||||
this.diagnostics.insert(buffer_id, diagnostics.clone());
|
||||
this.diagnostics.insert(
|
||||
buffer_id,
|
||||
diagnostics
|
||||
.iter()
|
||||
.map(DiagnosticEntryRef::to_owned)
|
||||
.collect(),
|
||||
);
|
||||
false
|
||||
})?;
|
||||
if unchanged {
|
||||
@@ -469,7 +476,7 @@ impl ProjectDiagnosticsEditor {
|
||||
grouped
|
||||
.entry(entry.diagnostic.group_id)
|
||||
.or_default()
|
||||
.push(DiagnosticEntry {
|
||||
.push(DiagnosticEntryRef {
|
||||
range: entry.range.to_point(&buffer_snapshot),
|
||||
diagnostic: entry.diagnostic,
|
||||
})
|
||||
|
||||
@@ -14,12 +14,14 @@ use workspace::{StatusItemView, ToolbarItemEvent, Workspace, item::ItemHandle};
|
||||
|
||||
use crate::{Deploy, IncludeWarnings, ProjectDiagnosticsEditor};
|
||||
|
||||
/// The status bar item that displays diagnostic counts.
|
||||
pub struct DiagnosticIndicator {
|
||||
summary: project::DiagnosticSummary,
|
||||
active_editor: Option<WeakEntity<Editor>>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
current_diagnostic: Option<Diagnostic>,
|
||||
active_editor: Option<WeakEntity<Editor>>,
|
||||
_observe_active_editor: Option<Subscription>,
|
||||
|
||||
diagnostics_update: Task<()>,
|
||||
diagnostic_summary_update: Task<()>,
|
||||
}
|
||||
@@ -73,10 +75,9 @@ impl Render for DiagnosticIndicator {
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.go_to_next_diagnostic(window, cx);
|
||||
}))
|
||||
.into_any_element(),
|
||||
.on_click(
|
||||
cx.listener(|this, _, window, cx| this.go_to_next_diagnostic(window, cx)),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
@@ -177,7 +178,8 @@ impl DiagnosticIndicator {
|
||||
.filter(|entry| !entry.range.is_empty())
|
||||
.min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
|
||||
.map(|entry| entry.diagnostic);
|
||||
if new_diagnostic != self.current_diagnostic {
|
||||
if new_diagnostic != self.current_diagnostic.as_ref() {
|
||||
let new_diagnostic = new_diagnostic.cloned();
|
||||
self.diagnostics_update =
|
||||
cx.spawn_in(window, async move |diagnostics_indicator, cx| {
|
||||
cx.background_executor()
|
||||
|
||||
@@ -75,12 +75,9 @@ impl Render for ToolbarControls {
|
||||
&ToggleDiagnosticsRefresh,
|
||||
))
|
||||
.on_click(cx.listener(move |toolbar_controls, _, _, cx| {
|
||||
match toolbar_controls.editor() {
|
||||
Some(editor) => {
|
||||
editor.stop_updating(cx);
|
||||
cx.notify();
|
||||
}
|
||||
None => {}
|
||||
if let Some(editor) = toolbar_controls.editor() {
|
||||
editor.stop_updating(cx);
|
||||
cx.notify();
|
||||
}
|
||||
})),
|
||||
)
|
||||
@@ -95,11 +92,10 @@ impl Render for ToolbarControls {
|
||||
&ToggleDiagnosticsRefresh,
|
||||
))
|
||||
.on_click(cx.listener({
|
||||
move |toolbar_controls, _, window, cx| match toolbar_controls
|
||||
.editor()
|
||||
{
|
||||
Some(editor) => editor.refresh_diagnostics(window, cx),
|
||||
None => {}
|
||||
move |toolbar_controls, _, window, cx| {
|
||||
if let Some(editor) = toolbar_controls.editor() {
|
||||
editor.refresh_diagnostics(window, cx)
|
||||
}
|
||||
}
|
||||
})),
|
||||
)
|
||||
@@ -110,9 +106,10 @@ impl Render for ToolbarControls {
|
||||
.icon_color(warning_color)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(Tooltip::text(warning_tooltip))
|
||||
.on_click(cx.listener(|this, _, window, cx| match &this.editor {
|
||||
Some(editor) => editor.toggle_warnings(window, cx),
|
||||
None => {}
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
if let Some(editor) = &this.editor {
|
||||
editor.toggle_warnings(window, cx)
|
||||
}
|
||||
})),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -23,6 +23,7 @@ itertools.workspace = true
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
ordered-float.workspace = true
|
||||
postage.workspace = true
|
||||
project.workspace = true
|
||||
regex.workspace = true
|
||||
serde.workspace = true
|
||||
|
||||
@@ -55,6 +55,13 @@ impl Declaration {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_file(&self) -> Option<&FileDeclaration> {
|
||||
match self {
|
||||
Declaration::Buffer { .. } => None,
|
||||
Declaration::File { declaration, .. } => Some(declaration),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn project_entry_id(&self) -> ProjectEntryId {
|
||||
match self {
|
||||
Declaration::File {
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
use cloud_llm_client::predict_edits_v3::DeclarationScoreComponents;
|
||||
use collections::HashMap;
|
||||
use itertools::Itertools as _;
|
||||
use language::BufferSnapshot;
|
||||
use ordered_float::OrderedFloat;
|
||||
use serde::Serialize;
|
||||
use std::{cmp::Reverse, collections::HashMap, ops::Range};
|
||||
use std::{cmp::Reverse, ops::Range};
|
||||
use strum::EnumIter;
|
||||
use text::{Point, ToPoint};
|
||||
|
||||
@@ -251,6 +252,7 @@ fn score_declaration(
|
||||
pub struct DeclarationScores {
|
||||
pub signature: f32,
|
||||
pub declaration: f32,
|
||||
pub retrieval: f32,
|
||||
}
|
||||
|
||||
impl DeclarationScores {
|
||||
@@ -258,7 +260,7 @@ impl DeclarationScores {
|
||||
// TODO: handle truncation
|
||||
|
||||
// Score related to how likely this is the correct declaration, range 0 to 1
|
||||
let accuracy_score = if components.is_same_file {
|
||||
let retrieval = if components.is_same_file {
|
||||
// TODO: use declaration_line_distance_rank
|
||||
1.0 / components.same_file_declaration_count as f32
|
||||
} else {
|
||||
@@ -274,13 +276,14 @@ impl DeclarationScores {
|
||||
};
|
||||
|
||||
// For now instead of linear combination, the scores are just multiplied together.
|
||||
let combined_score = 10.0 * accuracy_score * distance_score;
|
||||
let combined_score = 10.0 * retrieval * distance_score;
|
||||
|
||||
DeclarationScores {
|
||||
signature: combined_score * components.excerpt_vs_signature_weighted_overlap,
|
||||
// declaration score gets boosted both by being multiplied by 2 and by there being more
|
||||
// weighted overlap.
|
||||
declaration: 2.0 * combined_score * components.excerpt_vs_item_weighted_overlap,
|
||||
retrieval,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,8 +4,11 @@ mod excerpt;
|
||||
mod outline;
|
||||
mod reference;
|
||||
mod syntax_index;
|
||||
mod text_similarity;
|
||||
pub mod text_similarity;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use collections::HashMap;
|
||||
use gpui::{App, AppContext as _, Entity, Task};
|
||||
use language::BufferSnapshot;
|
||||
use text::{Point, ToOffset as _};
|
||||
@@ -33,8 +36,10 @@ impl EditPredictionContext {
|
||||
cx: &mut App,
|
||||
) -> Task<Option<Self>> {
|
||||
if let Some(syntax_index) = syntax_index {
|
||||
let index_state = syntax_index.read_with(cx, |index, _cx| index.state().clone());
|
||||
let index_state =
|
||||
syntax_index.read_with(cx, |index, _cx| Arc::downgrade(index.state()));
|
||||
cx.background_spawn(async move {
|
||||
let index_state = index_state.upgrade()?;
|
||||
let index_state = index_state.lock().await;
|
||||
Self::gather_context(cursor_point, &buffer, &excerpt_options, Some(&index_state))
|
||||
})
|
||||
@@ -50,6 +55,26 @@ impl EditPredictionContext {
|
||||
buffer: &BufferSnapshot,
|
||||
excerpt_options: &EditPredictionExcerptOptions,
|
||||
index_state: Option<&SyntaxIndexState>,
|
||||
) -> Option<Self> {
|
||||
Self::gather_context_with_references_fn(
|
||||
cursor_point,
|
||||
buffer,
|
||||
excerpt_options,
|
||||
index_state,
|
||||
references_in_excerpt,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn gather_context_with_references_fn(
|
||||
cursor_point: Point,
|
||||
buffer: &BufferSnapshot,
|
||||
excerpt_options: &EditPredictionExcerptOptions,
|
||||
index_state: Option<&SyntaxIndexState>,
|
||||
get_references: impl FnOnce(
|
||||
&EditPredictionExcerpt,
|
||||
&EditPredictionExcerptText,
|
||||
&BufferSnapshot,
|
||||
) -> HashMap<Identifier, Vec<Reference>>,
|
||||
) -> Option<Self> {
|
||||
let excerpt = EditPredictionExcerpt::select_from_buffer(
|
||||
cursor_point,
|
||||
@@ -73,7 +98,7 @@ impl EditPredictionContext {
|
||||
let cursor_offset_in_excerpt = cursor_offset_in_file.saturating_sub(excerpt.range.start);
|
||||
|
||||
let declarations = if let Some(index_state) = index_state {
|
||||
let references = references_in_excerpt(&excerpt, &excerpt_text, buffer);
|
||||
let references = get_references(&excerpt, &excerpt_text, buffer);
|
||||
|
||||
scored_declarations(
|
||||
&index_state,
|
||||
@@ -237,7 +262,8 @@ mod tests {
|
||||
let lang_id = lang.id();
|
||||
language_registry.add(Arc::new(lang));
|
||||
|
||||
let index = cx.new(|cx| SyntaxIndex::new(&project, cx));
|
||||
let file_indexing_parallelism = 2;
|
||||
let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx));
|
||||
cx.run_until_parked();
|
||||
|
||||
(project, index, lang_id)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use collections::HashMap;
|
||||
use language::BufferSnapshot;
|
||||
use std::collections::HashMap;
|
||||
use std::ops::Range;
|
||||
use util::RangeExt;
|
||||
|
||||
@@ -8,7 +8,7 @@ use crate::{
|
||||
excerpt::{EditPredictionExcerpt, EditPredictionExcerptText},
|
||||
};
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Reference {
|
||||
pub identifier: Identifier,
|
||||
pub range: Range<usize>,
|
||||
@@ -26,7 +26,7 @@ pub fn references_in_excerpt(
|
||||
excerpt_text: &EditPredictionExcerptText,
|
||||
snapshot: &BufferSnapshot,
|
||||
) -> HashMap<Identifier, Vec<Reference>> {
|
||||
let mut references = identifiers_in_range(
|
||||
let mut references = references_in_range(
|
||||
excerpt.range.clone(),
|
||||
excerpt_text.body.as_str(),
|
||||
ReferenceRegion::Nearby,
|
||||
@@ -38,7 +38,7 @@ pub fn references_in_excerpt(
|
||||
.iter()
|
||||
.zip(excerpt_text.parent_signatures.iter())
|
||||
{
|
||||
references.extend(identifiers_in_range(
|
||||
references.extend(references_in_range(
|
||||
range.clone(),
|
||||
text.as_str(),
|
||||
ReferenceRegion::Breadcrumb,
|
||||
@@ -46,7 +46,7 @@ pub fn references_in_excerpt(
|
||||
));
|
||||
}
|
||||
|
||||
let mut identifier_to_references: HashMap<Identifier, Vec<Reference>> = HashMap::new();
|
||||
let mut identifier_to_references: HashMap<Identifier, Vec<Reference>> = HashMap::default();
|
||||
for reference in references {
|
||||
identifier_to_references
|
||||
.entry(reference.identifier.clone())
|
||||
@@ -57,7 +57,7 @@ pub fn references_in_excerpt(
|
||||
}
|
||||
|
||||
/// Finds all nodes which have a "variable" match from the highlights query within the offset range.
|
||||
pub fn identifiers_in_range(
|
||||
pub fn references_in_range(
|
||||
range: Range<usize>,
|
||||
range_text: &str,
|
||||
reference_region: ReferenceRegion,
|
||||
@@ -120,7 +120,7 @@ mod test {
|
||||
use indoc::indoc;
|
||||
use language::{BufferSnapshot, Language, LanguageConfig, LanguageMatcher, tree_sitter_rust};
|
||||
|
||||
use crate::reference::{ReferenceRegion, identifiers_in_range};
|
||||
use crate::reference::{ReferenceRegion, references_in_range};
|
||||
|
||||
#[gpui::test]
|
||||
fn test_identifier_node_truncated(cx: &mut TestAppContext) {
|
||||
@@ -136,7 +136,7 @@ mod test {
|
||||
let buffer = create_buffer(code, cx);
|
||||
|
||||
let range = 0..35;
|
||||
let references = identifiers_in_range(
|
||||
let references = references_in_range(
|
||||
range.clone(),
|
||||
&code[range],
|
||||
ReferenceRegion::Breadcrumb,
|
||||
|
||||
@@ -1,13 +1,18 @@
|
||||
use anyhow::{Result, anyhow};
|
||||
use collections::{HashMap, HashSet};
|
||||
use futures::channel::mpsc;
|
||||
use futures::lock::Mutex;
|
||||
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity};
|
||||
use futures::{FutureExt as _, StreamExt, future};
|
||||
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
|
||||
use itertools::Itertools;
|
||||
use language::{Buffer, BufferEvent};
|
||||
use postage::stream::Stream as _;
|
||||
use project::buffer_store::{BufferStore, BufferStoreEvent};
|
||||
use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
|
||||
use project::{PathChange, Project, ProjectEntryId, ProjectPath};
|
||||
use slotmap::SlotMap;
|
||||
use std::iter;
|
||||
use std::ops::Range;
|
||||
use std::ops::{DerefMut, Range};
|
||||
use std::sync::Arc;
|
||||
use text::BufferId;
|
||||
use util::{RangeExt as _, debug_panic, some_or_debug_panic};
|
||||
@@ -17,42 +22,60 @@ use crate::declaration::{
|
||||
};
|
||||
use crate::outline::declarations_in_buffer;
|
||||
|
||||
// TODO
|
||||
//
|
||||
// * Also queue / debounce buffer changes. A challenge for this is that use of
|
||||
// `buffer_declarations_containing_range` assumes that the index is always immediately up to date.
|
||||
//
|
||||
// * Add a per language configuration for skipping indexing.
|
||||
|
||||
// Potential future improvements:
|
||||
//
|
||||
// * Prevent indexing of a large file from blocking the queue.
|
||||
//
|
||||
// * Send multiple selected excerpt ranges. Challenge is that excerpt ranges influence which
|
||||
// references are present and their scores.
|
||||
//
|
||||
// * Include single-file worktrees / non visible worktrees? E.g. go to definition that resolves to a
|
||||
// file in a build dependency. Should not be editable in that case - but how to distinguish the case
|
||||
// where it should be editable?
|
||||
|
||||
// Potential future optimizations:
|
||||
//
|
||||
// * Cache of buffers for files
|
||||
// * Index files on multiple threads in Zed (currently only parallel for the CLI). Adding some kind
|
||||
// of priority system to the background executor could help - it's single threaded for now to avoid
|
||||
// interfering with other work.
|
||||
//
|
||||
// * Parse files directly instead of loading into a Rope. Make SyntaxMap generic to handle embedded
|
||||
// languages? Will also need to find line boundaries, but that can be done by scanning characters in
|
||||
// the flat representation.
|
||||
// * Parse files directly instead of loading into a Rope.
|
||||
//
|
||||
// - This would allow the task handling dirty_files to be done entirely on the background executor.
|
||||
//
|
||||
// - Make SyntaxMap generic to handle embedded languages? Will also need to find line boundaries,
|
||||
// but that can be done by scanning characters in the flat representation.
|
||||
//
|
||||
// * Use something similar to slotmap without key versions.
|
||||
//
|
||||
// * Concurrent slotmap
|
||||
//
|
||||
// * Use queue for parsing
|
||||
|
||||
pub struct SyntaxIndex {
|
||||
state: Arc<Mutex<SyntaxIndexState>>,
|
||||
project: WeakEntity<Project>,
|
||||
initial_file_indexing_done_rx: postage::watch::Receiver<bool>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct SyntaxIndexState {
|
||||
declarations: SlotMap<DeclarationId, Declaration>,
|
||||
identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
|
||||
files: HashMap<ProjectEntryId, FileState>,
|
||||
buffers: HashMap<BufferId, BufferState>,
|
||||
dirty_files: HashMap<ProjectEntryId, ProjectPath>,
|
||||
dirty_files_tx: mpsc::Sender<()>,
|
||||
_file_indexing_task: Option<Task<()>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct FileState {
|
||||
declarations: Vec<DeclarationId>,
|
||||
task: Option<Task<()>>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -62,34 +85,110 @@ struct BufferState {
|
||||
}
|
||||
|
||||
impl SyntaxIndex {
|
||||
pub fn new(project: &Entity<Project>, cx: &mut Context<Self>) -> Self {
|
||||
let mut this = Self {
|
||||
pub fn new(
|
||||
project: &Entity<Project>,
|
||||
file_indexing_parallelism: usize,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
assert!(file_indexing_parallelism > 0);
|
||||
let (dirty_files_tx, mut dirty_files_rx) = mpsc::channel::<()>(1);
|
||||
let (mut initial_file_indexing_done_tx, initial_file_indexing_done_rx) =
|
||||
postage::watch::channel();
|
||||
|
||||
let initial_state = SyntaxIndexState {
|
||||
declarations: SlotMap::default(),
|
||||
identifiers: HashMap::default(),
|
||||
files: HashMap::default(),
|
||||
buffers: HashMap::default(),
|
||||
dirty_files: HashMap::default(),
|
||||
dirty_files_tx,
|
||||
_file_indexing_task: None,
|
||||
};
|
||||
let this = Self {
|
||||
project: project.downgrade(),
|
||||
state: Arc::new(Mutex::new(SyntaxIndexState::default())),
|
||||
state: Arc::new(Mutex::new(initial_state)),
|
||||
initial_file_indexing_done_rx,
|
||||
};
|
||||
|
||||
let worktree_store = project.read(cx).worktree_store();
|
||||
cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
|
||||
.detach();
|
||||
|
||||
for worktree in worktree_store
|
||||
let initial_worktree_snapshots = worktree_store
|
||||
.read(cx)
|
||||
.worktrees()
|
||||
.map(|w| w.read(cx).snapshot())
|
||||
.collect::<Vec<_>>()
|
||||
{
|
||||
for entry in worktree.files(false, 0) {
|
||||
this.update_file(
|
||||
entry.id,
|
||||
ProjectPath {
|
||||
worktree_id: worktree.id(),
|
||||
path: entry.path.clone(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
}
|
||||
.collect::<Vec<_>>();
|
||||
if !initial_worktree_snapshots.is_empty() {
|
||||
this.state.try_lock().unwrap()._file_indexing_task =
|
||||
Some(cx.spawn(async move |this, cx| {
|
||||
let snapshots_file_count = initial_worktree_snapshots
|
||||
.iter()
|
||||
.map(|worktree| worktree.file_count())
|
||||
.sum::<usize>();
|
||||
let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
|
||||
let chunk_count = snapshots_file_count.div_ceil(chunk_size);
|
||||
let file_chunks = initial_worktree_snapshots
|
||||
.iter()
|
||||
.flat_map(|worktree| {
|
||||
let worktree_id = worktree.id();
|
||||
worktree.files(false, 0).map(move |entry| {
|
||||
(
|
||||
entry.id,
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: entry.path.clone(),
|
||||
},
|
||||
)
|
||||
})
|
||||
})
|
||||
.chunks(chunk_size);
|
||||
|
||||
let mut tasks = Vec::with_capacity(chunk_count);
|
||||
for chunk in file_chunks.into_iter() {
|
||||
tasks.push(Self::update_dirty_files(
|
||||
&this,
|
||||
chunk.into_iter().collect(),
|
||||
cx.clone(),
|
||||
));
|
||||
}
|
||||
futures::future::join_all(tasks).await;
|
||||
|
||||
log::info!("Finished initial file indexing");
|
||||
*initial_file_indexing_done_tx.borrow_mut() = true;
|
||||
|
||||
let Ok(state) = this.read_with(cx, |this, _cx| this.state.clone()) else {
|
||||
return;
|
||||
};
|
||||
while dirty_files_rx.next().await.is_some() {
|
||||
let mut state = state.lock().await;
|
||||
let was_underused = state.dirty_files.capacity() > 255
|
||||
&& state.dirty_files.len() * 8 < state.dirty_files.capacity();
|
||||
let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
|
||||
if was_underused {
|
||||
state.dirty_files.shrink_to_fit();
|
||||
}
|
||||
drop(state);
|
||||
if dirty_files.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
|
||||
let chunk_count = dirty_files.len().div_ceil(chunk_size);
|
||||
let mut tasks = Vec::with_capacity(chunk_count);
|
||||
let chunks = dirty_files.into_iter().chunks(chunk_size);
|
||||
for chunk in chunks.into_iter() {
|
||||
tasks.push(Self::update_dirty_files(
|
||||
&this,
|
||||
chunk.into_iter().collect(),
|
||||
cx.clone(),
|
||||
));
|
||||
}
|
||||
futures::future::join_all(tasks).await;
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
|
||||
.detach();
|
||||
|
||||
let buffer_store = project.read(cx).buffer_store().clone();
|
||||
for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
|
||||
this.register_buffer(&buffer, cx);
|
||||
@@ -100,6 +199,63 @@ impl SyntaxIndex {
|
||||
this
|
||||
}
|
||||
|
||||
async fn update_dirty_files(
|
||||
this: &WeakEntity<Self>,
|
||||
dirty_files: Vec<(ProjectEntryId, ProjectPath)>,
|
||||
mut cx: AsyncApp,
|
||||
) {
|
||||
for (entry_id, project_path) in dirty_files {
|
||||
let Ok(task) = this.update(&mut cx, |this, cx| {
|
||||
this.update_file(entry_id, project_path, cx)
|
||||
}) else {
|
||||
return;
|
||||
};
|
||||
task.await;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn wait_for_initial_file_indexing(&self, cx: &App) -> Task<Result<()>> {
|
||||
if *self.initial_file_indexing_done_rx.borrow() {
|
||||
Task::ready(Ok(()))
|
||||
} else {
|
||||
let mut rx = self.initial_file_indexing_done_rx.clone();
|
||||
cx.background_spawn(async move {
|
||||
loop {
|
||||
match rx.recv().await {
|
||||
Some(true) => return Ok(()),
|
||||
Some(false) => {}
|
||||
None => {
|
||||
return Err(anyhow!(
|
||||
"SyntaxIndex dropped while waiting for initial file indexing"
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn indexed_file_paths(&self, cx: &App) -> Task<Vec<ProjectPath>> {
|
||||
let state = self.state.clone();
|
||||
let project = self.project.clone();
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let state = state.lock().await;
|
||||
let Some(project) = project.upgrade() else {
|
||||
return vec![];
|
||||
};
|
||||
project
|
||||
.read_with(cx, |project, cx| {
|
||||
state
|
||||
.files
|
||||
.keys()
|
||||
.filter_map(|entry_id| project.path_for_entry(*entry_id, cx))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_worktree_store_event(
|
||||
&mut self,
|
||||
_worktree_store: Entity<WorktreeStore>,
|
||||
@@ -112,22 +268,27 @@ impl SyntaxIndex {
|
||||
let state = Arc::downgrade(&self.state);
|
||||
let worktree_id = *worktree_id;
|
||||
let updated_entries_set = updated_entries_set.clone();
|
||||
cx.spawn(async move |this, cx| {
|
||||
cx.background_spawn(async move {
|
||||
let Some(state) = state.upgrade() else { return };
|
||||
let mut state = state.lock().await;
|
||||
for (path, entry_id, path_change) in updated_entries_set.iter() {
|
||||
if let PathChange::Removed = path_change {
|
||||
state.lock().await.files.remove(entry_id);
|
||||
state.files.remove(entry_id);
|
||||
state.dirty_files.remove(entry_id);
|
||||
} else {
|
||||
let project_path = ProjectPath {
|
||||
worktree_id,
|
||||
path: path.clone(),
|
||||
};
|
||||
this.update(cx, |this, cx| {
|
||||
this.update_file(*entry_id, project_path, cx);
|
||||
})
|
||||
.ok();
|
||||
state.dirty_files.insert(*entry_id, project_path);
|
||||
}
|
||||
}
|
||||
match state.dirty_files_tx.try_send(()) {
|
||||
Err(err) if err.is_disconnected() => {
|
||||
log::error!("bug: syntax indexing queue is disconnected");
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
@@ -177,7 +338,7 @@ impl SyntaxIndex {
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn register_buffer(&mut self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
|
||||
fn register_buffer(&self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
|
||||
let buffer_id = buffer.read(cx).remote_id();
|
||||
cx.observe_release(buffer, move |this, _buffer, cx| {
|
||||
this.with_state(cx, move |state| {
|
||||
@@ -208,8 +369,11 @@ impl SyntaxIndex {
|
||||
}
|
||||
}
|
||||
|
||||
fn update_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
|
||||
fn update_buffer(&self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
|
||||
let buffer = buffer_entity.read(cx);
|
||||
if buffer.language().is_none() {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(project_entry_id) =
|
||||
project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
|
||||
@@ -229,70 +393,64 @@ impl SyntaxIndex {
|
||||
}
|
||||
});
|
||||
|
||||
let parse_task = cx.background_spawn(async move {
|
||||
let snapshot = snapshot_task.await?;
|
||||
let rope = snapshot.text.as_rope().clone();
|
||||
let state = Arc::downgrade(&self.state);
|
||||
let task = cx.background_spawn(async move {
|
||||
// TODO: How to handle errors?
|
||||
let Ok(snapshot) = snapshot_task.await else {
|
||||
return;
|
||||
};
|
||||
let rope = snapshot.text.as_rope();
|
||||
|
||||
anyhow::Ok((
|
||||
declarations_in_buffer(&snapshot)
|
||||
.into_iter()
|
||||
.map(|item| {
|
||||
(
|
||||
item.parent_index,
|
||||
BufferDeclaration::from_outline(item, &rope),
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
rope,
|
||||
))
|
||||
});
|
||||
|
||||
let task = cx.spawn({
|
||||
async move |this, cx| {
|
||||
let Ok((declarations, rope)) = parse_task.await else {
|
||||
return;
|
||||
};
|
||||
|
||||
this.update(cx, move |this, cx| {
|
||||
this.with_state(cx, move |state| {
|
||||
let buffer_state = state
|
||||
.buffers
|
||||
.entry(buffer_id)
|
||||
.or_insert_with(Default::default);
|
||||
|
||||
SyntaxIndexState::remove_buffer_declarations(
|
||||
&buffer_state.declarations,
|
||||
&mut state.declarations,
|
||||
&mut state.identifiers,
|
||||
);
|
||||
|
||||
let mut new_ids = Vec::with_capacity(declarations.len());
|
||||
state.declarations.reserve(declarations.len());
|
||||
for (parent_index, mut declaration) in declarations {
|
||||
declaration.parent = parent_index
|
||||
.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));
|
||||
|
||||
let identifier = declaration.identifier.clone();
|
||||
let declaration_id = state.declarations.insert(Declaration::Buffer {
|
||||
rope: rope.clone(),
|
||||
buffer_id,
|
||||
declaration,
|
||||
project_entry_id,
|
||||
});
|
||||
new_ids.push(declaration_id);
|
||||
|
||||
state
|
||||
.identifiers
|
||||
.entry(identifier)
|
||||
.or_default()
|
||||
.insert(declaration_id);
|
||||
}
|
||||
|
||||
buffer_state.declarations = new_ids;
|
||||
});
|
||||
let declarations = declarations_in_buffer(&snapshot)
|
||||
.into_iter()
|
||||
.map(|item| {
|
||||
(
|
||||
item.parent_index,
|
||||
BufferDeclaration::from_outline(item, &rope),
|
||||
)
|
||||
})
|
||||
.ok();
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let Some(state) = state.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let mut state = state.lock().await;
|
||||
let state = state.deref_mut();
|
||||
|
||||
let buffer_state = state
|
||||
.buffers
|
||||
.entry(buffer_id)
|
||||
.or_insert_with(Default::default);
|
||||
|
||||
SyntaxIndexState::remove_buffer_declarations(
|
||||
&buffer_state.declarations,
|
||||
&mut state.declarations,
|
||||
&mut state.identifiers,
|
||||
);
|
||||
|
||||
let mut new_ids = Vec::with_capacity(declarations.len());
|
||||
state.declarations.reserve(declarations.len());
|
||||
for (parent_index, mut declaration) in declarations {
|
||||
declaration.parent =
|
||||
parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));
|
||||
|
||||
let identifier = declaration.identifier.clone();
|
||||
let declaration_id = state.declarations.insert(Declaration::Buffer {
|
||||
rope: rope.clone(),
|
||||
buffer_id,
|
||||
declaration,
|
||||
project_entry_id,
|
||||
});
|
||||
new_ids.push(declaration_id);
|
||||
|
||||
state
|
||||
.identifiers
|
||||
.entry(identifier)
|
||||
.or_default()
|
||||
.insert(declaration_id);
|
||||
}
|
||||
|
||||
buffer_state.declarations = new_ids;
|
||||
});
|
||||
|
||||
self.with_state(cx, move |state| {
|
||||
@@ -309,28 +467,53 @@ impl SyntaxIndex {
|
||||
entry_id: ProjectEntryId,
|
||||
project_path: ProjectPath,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
) -> Task<()> {
|
||||
let Some(project) = self.project.upgrade() else {
|
||||
return;
|
||||
return Task::ready(());
|
||||
};
|
||||
let project = project.read(cx);
|
||||
let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
|
||||
return;
|
||||
|
||||
let language_registry = project.languages();
|
||||
let Some(available_language) =
|
||||
language_registry.language_for_file_path(project_path.path.as_std_path())
|
||||
else {
|
||||
return Task::ready(());
|
||||
};
|
||||
let language = if let Some(Ok(Ok(language))) = language_registry
|
||||
.load_language(&available_language)
|
||||
.now_or_never()
|
||||
{
|
||||
if language
|
||||
.grammar()
|
||||
.is_none_or(|grammar| grammar.outline_config.is_none())
|
||||
{
|
||||
return Task::ready(());
|
||||
}
|
||||
future::Either::Left(async { Ok(language) })
|
||||
} else {
|
||||
let language_registry = language_registry.clone();
|
||||
future::Either::Right(async move {
|
||||
anyhow::Ok(
|
||||
language_registry
|
||||
.load_language(&available_language)
|
||||
.await??,
|
||||
)
|
||||
})
|
||||
};
|
||||
|
||||
let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
|
||||
return Task::ready(());
|
||||
};
|
||||
let language_registry = project.languages().clone();
|
||||
|
||||
let snapshot_task = worktree.update(cx, |worktree, cx| {
|
||||
let load_task = worktree.load_file(&project_path.path, cx);
|
||||
cx.spawn(async move |_this, cx| {
|
||||
let loaded_file = load_task.await?;
|
||||
let language = language_registry
|
||||
.language_for_file_path(&project_path.path.as_std_path())
|
||||
.await
|
||||
.ok();
|
||||
let language = language.await?;
|
||||
|
||||
let buffer = cx.new(|cx| {
|
||||
let mut buffer = Buffer::local(loaded_file.text, cx);
|
||||
buffer.set_language(language, cx);
|
||||
buffer.set_language(Some(language), cx);
|
||||
buffer
|
||||
})?;
|
||||
|
||||
@@ -343,75 +526,58 @@ impl SyntaxIndex {
|
||||
})
|
||||
});
|
||||
|
||||
let parse_task = cx.background_spawn(async move {
|
||||
let snapshot = snapshot_task.await?;
|
||||
let state = Arc::downgrade(&self.state);
|
||||
cx.background_spawn(async move {
|
||||
// TODO: How to handle errors?
|
||||
let Ok(snapshot) = snapshot_task.await else {
|
||||
return;
|
||||
};
|
||||
let rope = snapshot.as_rope();
|
||||
let declarations = declarations_in_buffer(&snapshot)
|
||||
.into_iter()
|
||||
.map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
|
||||
.collect::<Vec<_>>();
|
||||
anyhow::Ok(declarations)
|
||||
});
|
||||
|
||||
let task = cx.spawn({
|
||||
async move |this, cx| {
|
||||
// TODO: how to handle errors?
|
||||
let Ok(declarations) = parse_task.await else {
|
||||
return;
|
||||
let Some(state) = state.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let mut state = state.lock().await;
|
||||
let state = state.deref_mut();
|
||||
|
||||
let file_state = state.files.entry(entry_id).or_insert_with(Default::default);
|
||||
for old_declaration_id in &file_state.declarations {
|
||||
let Some(declaration) = state.declarations.remove(*old_declaration_id) else {
|
||||
debug_panic!("declaration not found");
|
||||
continue;
|
||||
};
|
||||
this.update(cx, |this, cx| {
|
||||
this.with_state(cx, move |state| {
|
||||
let file_state =
|
||||
state.files.entry(entry_id).or_insert_with(Default::default);
|
||||
|
||||
for old_declaration_id in &file_state.declarations {
|
||||
let Some(declaration) = state.declarations.remove(*old_declaration_id)
|
||||
else {
|
||||
debug_panic!("declaration not found");
|
||||
continue;
|
||||
};
|
||||
if let Some(identifier_declarations) =
|
||||
state.identifiers.get_mut(declaration.identifier())
|
||||
{
|
||||
identifier_declarations.remove(old_declaration_id);
|
||||
}
|
||||
}
|
||||
|
||||
let mut new_ids = Vec::with_capacity(declarations.len());
|
||||
state.declarations.reserve(declarations.len());
|
||||
|
||||
for (parent_index, mut declaration) in declarations {
|
||||
declaration.parent = parent_index
|
||||
.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));
|
||||
|
||||
let identifier = declaration.identifier.clone();
|
||||
let declaration_id = state.declarations.insert(Declaration::File {
|
||||
project_entry_id: entry_id,
|
||||
declaration,
|
||||
});
|
||||
new_ids.push(declaration_id);
|
||||
|
||||
state
|
||||
.identifiers
|
||||
.entry(identifier)
|
||||
.or_default()
|
||||
.insert(declaration_id);
|
||||
}
|
||||
|
||||
file_state.declarations = new_ids;
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
if let Some(identifier_declarations) =
|
||||
state.identifiers.get_mut(declaration.identifier())
|
||||
{
|
||||
identifier_declarations.remove(old_declaration_id);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
self.with_state(cx, move |state| {
|
||||
state
|
||||
.files
|
||||
.entry(entry_id)
|
||||
.or_insert_with(Default::default)
|
||||
.task = Some(task);
|
||||
});
|
||||
let mut new_ids = Vec::with_capacity(declarations.len());
|
||||
state.declarations.reserve(declarations.len());
|
||||
for (parent_index, mut declaration) in declarations {
|
||||
declaration.parent =
|
||||
parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));
|
||||
|
||||
let identifier = declaration.identifier.clone();
|
||||
let declaration_id = state.declarations.insert(Declaration::File {
|
||||
project_entry_id: entry_id,
|
||||
declaration,
|
||||
});
|
||||
new_ids.push(declaration_id);
|
||||
|
||||
state
|
||||
.identifiers
|
||||
.entry(identifier)
|
||||
.or_default()
|
||||
.insert(declaration_id);
|
||||
}
|
||||
file_state.declarations = new_ids;
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -576,13 +742,13 @@ mod tests {
|
||||
let decls = index_state.declarations_for_identifier::<8>(&main);
|
||||
assert_eq!(decls.len(), 2);
|
||||
|
||||
let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
|
||||
assert_eq!(decl.identifier, main.clone());
|
||||
assert_eq!(decl.item_range, 32..280);
|
||||
|
||||
let decl = expect_file_decl("a.rs", &decls[1].1, &project, cx);
|
||||
let decl = expect_file_decl("a.rs", &decls[0].1, &project, cx);
|
||||
assert_eq!(decl.identifier, main);
|
||||
assert_eq!(decl.item_range, 0..98);
|
||||
|
||||
let decl = expect_file_decl("c.rs", &decls[1].1, &project, cx);
|
||||
assert_eq!(decl.identifier, main.clone());
|
||||
assert_eq!(decl.item_range, 32..280);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -718,8 +884,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let decls = index_state.declarations_for_identifier::<8>(&main);
|
||||
assert_eq!(decls.len(), 2);
|
||||
expect_file_decl("c.rs", &decls[0].1, &project, cx);
|
||||
expect_file_decl("a.rs", &decls[1].1, &project, cx);
|
||||
expect_file_decl("a.rs", &decls[0].1, &project, cx);
|
||||
expect_file_decl("c.rs", &decls[1].1, &project, cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -852,7 +1018,8 @@ mod tests {
|
||||
let lang_id = lang.id();
|
||||
language_registry.add(Arc::new(lang));
|
||||
|
||||
let index = cx.new(|cx| SyntaxIndex::new(&project, cx));
|
||||
let file_indexing_parallelism = 2;
|
||||
let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx));
|
||||
cx.run_until_parked();
|
||||
|
||||
(project, index, lang_id)
|
||||
|
||||
@@ -776,6 +776,8 @@ actions!(
|
||||
UniqueLinesCaseInsensitive,
|
||||
/// Removes duplicate lines (case-sensitive).
|
||||
UniqueLinesCaseSensitive,
|
||||
/// Removes the surrounding syntax node (for example brackets, or closures)
|
||||
/// from the current selections.
|
||||
UnwrapSyntaxNode,
|
||||
/// Wraps selections in tag specified by language.
|
||||
WrapSelectionsInTag
|
||||
|
||||
@@ -1176,7 +1176,7 @@ impl DisplaySnapshot {
|
||||
.map(|(row, block)| (DisplayRow(row), block))
|
||||
}
|
||||
|
||||
pub fn sticky_header_excerpt(&self, row: f32) -> Option<StickyHeaderExcerpt<'_>> {
|
||||
pub fn sticky_header_excerpt(&self, row: f64) -> Option<StickyHeaderExcerpt<'_>> {
|
||||
self.block_snapshot.sticky_header_excerpt(row)
|
||||
}
|
||||
|
||||
@@ -1877,33 +1877,33 @@ pub mod tests {
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(0), 7),
|
||||
language::SelectionGoal::HorizontalPosition(x.0)
|
||||
language::SelectionGoal::HorizontalPosition(f64::from(x))
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
movement::down(
|
||||
&snapshot,
|
||||
DisplayPoint::new(DisplayRow(0), 7),
|
||||
language::SelectionGoal::HorizontalPosition(x.0),
|
||||
language::SelectionGoal::HorizontalPosition(f64::from(x)),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(1), 10),
|
||||
language::SelectionGoal::HorizontalPosition(x.0)
|
||||
language::SelectionGoal::HorizontalPosition(f64::from(x))
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
movement::down(
|
||||
&snapshot,
|
||||
DisplayPoint::new(DisplayRow(1), 10),
|
||||
language::SelectionGoal::HorizontalPosition(x.0),
|
||||
language::SelectionGoal::HorizontalPosition(f64::from(x)),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(2), 4),
|
||||
language::SelectionGoal::HorizontalPosition(x.0)
|
||||
language::SelectionGoal::HorizontalPosition(f64::from(x))
|
||||
)
|
||||
);
|
||||
|
||||
@@ -1920,7 +1920,7 @@ pub mod tests {
|
||||
|
||||
// Re-wrap on font size changes
|
||||
map.update(cx, |map, cx| {
|
||||
map.set_font(font("Helvetica"), px(font_size.0 + 3.), cx)
|
||||
map.set_font(font("Helvetica"), font_size + Pixels::from(3.), cx)
|
||||
});
|
||||
|
||||
let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
||||
@@ -1395,7 +1395,7 @@ impl BlockSnapshot {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn sticky_header_excerpt(&self, position: f32) -> Option<StickyHeaderExcerpt<'_>> {
|
||||
pub(crate) fn sticky_header_excerpt(&self, position: f64) -> Option<StickyHeaderExcerpt<'_>> {
|
||||
let top_row = position as u32;
|
||||
let mut cursor = self.transforms.cursor::<BlockRow>(());
|
||||
cursor.seek(&BlockRow(top_row), Bias::Right);
|
||||
|
||||
@@ -387,7 +387,13 @@ impl<'a> Iterator for InlayChunks<'a> {
|
||||
.right_1()
|
||||
.size_3()
|
||||
.border_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.border_color(
|
||||
if cx.theme().appearance().is_light() {
|
||||
gpui::black().opacity(0.5)
|
||||
} else {
|
||||
gpui::white().opacity(0.5)
|
||||
},
|
||||
)
|
||||
.bg(color),
|
||||
)
|
||||
.into_any_element()
|
||||
|
||||
@@ -122,7 +122,7 @@ use itertools::{Either, Itertools};
|
||||
use language::{
|
||||
AutoindentMode, BlockCommentConfig, BracketMatch, BracketPair, Buffer, BufferRow,
|
||||
BufferSnapshot, Capability, CharClassifier, CharKind, CharScopeContext, CodeLabel, CursorShape,
|
||||
DiagnosticEntry, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, IndentKind,
|
||||
DiagnosticEntryRef, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, IndentKind,
|
||||
IndentSize, Language, OffsetRangeExt, Point, Runnable, RunnableRange, Selection, SelectionGoal,
|
||||
TextObject, TransactionId, TreeSitterOptions, WordsQuery,
|
||||
language_settings::{
|
||||
@@ -142,7 +142,7 @@ use mouse_context_menu::MouseContextMenu;
|
||||
use movement::TextLayoutDetails;
|
||||
use multi_buffer::{
|
||||
ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow,
|
||||
MultiOrSingleBufferOffsetRange, ToOffsetUtf16,
|
||||
ToOffsetUtf16,
|
||||
};
|
||||
use parking_lot::Mutex;
|
||||
use persistence::DB;
|
||||
@@ -210,6 +210,7 @@ use crate::{
|
||||
code_context_menus::CompletionsMenuSource,
|
||||
editor_settings::MultiCursorModifier,
|
||||
hover_links::{find_url, find_url_from_range},
|
||||
scroll::{ScrollOffset, ScrollPixelOffset},
|
||||
signature_help::{SignatureHelpHiddenBy, SignatureHelpState},
|
||||
};
|
||||
|
||||
@@ -404,7 +405,7 @@ pub fn set_blame_renderer(renderer: impl BlameRenderer + 'static, cx: &mut App)
|
||||
pub trait DiagnosticRenderer {
|
||||
fn render_group(
|
||||
&self,
|
||||
diagnostic_group: Vec<DiagnosticEntry<Point>>,
|
||||
diagnostic_group: Vec<DiagnosticEntryRef<'_, Point>>,
|
||||
buffer_id: BufferId,
|
||||
snapshot: EditorSnapshot,
|
||||
editor: WeakEntity<Editor>,
|
||||
@@ -413,7 +414,7 @@ pub trait DiagnosticRenderer {
|
||||
|
||||
fn render_hover(
|
||||
&self,
|
||||
diagnostic_group: Vec<DiagnosticEntry<Point>>,
|
||||
diagnostic_group: Vec<DiagnosticEntryRef<'_, Point>>,
|
||||
range: Range<Point>,
|
||||
buffer_id: BufferId,
|
||||
cx: &mut App,
|
||||
@@ -3211,22 +3212,27 @@ impl Editor {
|
||||
let background_executor = cx.background_executor().clone();
|
||||
let editor_id = cx.entity().entity_id().as_u64() as ItemId;
|
||||
self.serialize_selections = cx.background_spawn(async move {
|
||||
background_executor.timer(SERIALIZATION_THROTTLE_TIME).await;
|
||||
let db_selections = selections
|
||||
.iter()
|
||||
.map(|selection| {
|
||||
(
|
||||
selection.start.to_offset(&snapshot),
|
||||
selection.end.to_offset(&snapshot),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
background_executor.timer(SERIALIZATION_THROTTLE_TIME).await;
|
||||
let db_selections = selections
|
||||
.iter()
|
||||
.map(|selection| {
|
||||
(
|
||||
selection.start.to_offset(&snapshot),
|
||||
selection.end.to_offset(&snapshot),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
DB.save_editor_selections(editor_id, workspace_id, db_selections)
|
||||
.await
|
||||
.with_context(|| format!("persisting editor selections for editor {editor_id}, workspace {workspace_id:?}"))
|
||||
.log_err();
|
||||
});
|
||||
DB.save_editor_selections(editor_id, workspace_id, db_selections)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"persisting editor selections for editor {editor_id}, \
|
||||
workspace {workspace_id:?}"
|
||||
)
|
||||
})
|
||||
.log_err();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6871,17 +6877,7 @@ impl Editor {
|
||||
continue;
|
||||
}
|
||||
|
||||
let range = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id,
|
||||
text_anchor: start,
|
||||
diff_base_anchor: None,
|
||||
}..Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id,
|
||||
text_anchor: end,
|
||||
diff_base_anchor: None,
|
||||
};
|
||||
let range = Anchor::range_in_buffer(excerpt_id, buffer_id, start..end);
|
||||
if highlight.kind == lsp::DocumentHighlightKind::WRITE {
|
||||
write_ranges.push(range);
|
||||
} else {
|
||||
@@ -8626,8 +8622,8 @@ impl Editor {
|
||||
self.context_menu_options = Some(options);
|
||||
}
|
||||
|
||||
const EDIT_PREDICTION_POPOVER_PADDING_X: Pixels = Pixels(24.);
|
||||
const EDIT_PREDICTION_POPOVER_PADDING_Y: Pixels = Pixels(2.);
|
||||
const EDIT_PREDICTION_POPOVER_PADDING_X: Pixels = px(24.);
|
||||
const EDIT_PREDICTION_POPOVER_PADDING_Y: Pixels = px(2.);
|
||||
|
||||
fn render_edit_prediction_popover(
|
||||
&mut self,
|
||||
@@ -8636,11 +8632,12 @@ impl Editor {
|
||||
right_margin: Pixels,
|
||||
editor_snapshot: &EditorSnapshot,
|
||||
visible_row_range: Range<DisplayRow>,
|
||||
scroll_top: f32,
|
||||
scroll_bottom: f32,
|
||||
scroll_top: ScrollOffset,
|
||||
scroll_bottom: ScrollOffset,
|
||||
line_layouts: &[LineWithInvisibles],
|
||||
line_height: Pixels,
|
||||
scroll_pixel_position: gpui::Point<Pixels>,
|
||||
scroll_position: gpui::Point<ScrollOffset>,
|
||||
scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
|
||||
newest_selection_head: Option<DisplayPoint>,
|
||||
editor_width: Pixels,
|
||||
style: &EditorStyle,
|
||||
@@ -8732,6 +8729,7 @@ impl Editor {
|
||||
visible_row_range,
|
||||
line_layouts,
|
||||
line_height,
|
||||
scroll_position,
|
||||
scroll_pixel_position,
|
||||
newest_selection_head,
|
||||
editor_width,
|
||||
@@ -8774,14 +8772,14 @@ impl Editor {
|
||||
visible_row_range: Range<DisplayRow>,
|
||||
line_layouts: &[LineWithInvisibles],
|
||||
line_height: Pixels,
|
||||
scroll_pixel_position: gpui::Point<Pixels>,
|
||||
scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
|
||||
newest_selection_head: Option<DisplayPoint>,
|
||||
target_display_point: DisplayPoint,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Option<(AnyElement, gpui::Point<Pixels>)> {
|
||||
let scrolled_content_origin =
|
||||
content_origin - gpui::Point::new(scroll_pixel_position.x, Pixels(0.0));
|
||||
content_origin - gpui::Point::new(scroll_pixel_position.x.into(), Pixels::ZERO);
|
||||
|
||||
const SCROLL_PADDING_Y: Pixels = px(12.);
|
||||
|
||||
@@ -8816,8 +8814,8 @@ impl Editor {
|
||||
let target_column = target_display_point.column() as usize;
|
||||
|
||||
let target_x = line_layout.x_for_index(target_column);
|
||||
let target_y =
|
||||
(target_display_point.row().as_f32() * line_height) - scroll_pixel_position.y;
|
||||
let target_y = (target_display_point.row().as_f64() * f64::from(line_height))
|
||||
- scroll_pixel_position.y;
|
||||
|
||||
let flag_on_right = target_x < text_bounds.size.width / 2.;
|
||||
|
||||
@@ -8845,7 +8843,7 @@ impl Editor {
|
||||
|
||||
let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
|
||||
|
||||
let mut origin = scrolled_content_origin + point(target_x, target_y)
|
||||
let mut origin = scrolled_content_origin + point(target_x, target_y.into())
|
||||
- point(
|
||||
if flag_on_right {
|
||||
POLE_WIDTH
|
||||
@@ -8898,16 +8896,16 @@ impl Editor {
|
||||
content_origin: gpui::Point<Pixels>,
|
||||
editor_snapshot: &EditorSnapshot,
|
||||
visible_row_range: Range<DisplayRow>,
|
||||
scroll_top: f32,
|
||||
scroll_bottom: f32,
|
||||
scroll_top: ScrollOffset,
|
||||
scroll_bottom: ScrollOffset,
|
||||
line_height: Pixels,
|
||||
scroll_pixel_position: gpui::Point<Pixels>,
|
||||
scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
|
||||
target_display_point: DisplayPoint,
|
||||
editor_width: Pixels,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Option<(AnyElement, gpui::Point<Pixels>)> {
|
||||
if target_display_point.row().as_f32() < scroll_top {
|
||||
if target_display_point.row().as_f64() < scroll_top {
|
||||
let mut element = self
|
||||
.render_edit_prediction_line_popover(
|
||||
"Jump to Edit",
|
||||
@@ -8926,7 +8924,7 @@ impl Editor {
|
||||
let origin = text_bounds.origin + offset;
|
||||
element.prepaint_at(origin, window, cx);
|
||||
Some((element, origin))
|
||||
} else if (target_display_point.row().as_f32() + 1.) > scroll_bottom {
|
||||
} else if (target_display_point.row().as_f64() + 1.) > scroll_bottom {
|
||||
let mut element = self
|
||||
.render_edit_prediction_line_popover(
|
||||
"Jump to Edit",
|
||||
@@ -8968,7 +8966,7 @@ impl Editor {
|
||||
visible_row_range: Range<DisplayRow>,
|
||||
target_display_point: DisplayPoint,
|
||||
line_height: Pixels,
|
||||
scroll_pixel_position: gpui::Point<Pixels>,
|
||||
scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
|
||||
content_origin: gpui::Point<Pixels>,
|
||||
editor_width: Pixels,
|
||||
window: &mut Window,
|
||||
@@ -8987,7 +8985,7 @@ impl Editor {
|
||||
|
||||
let line_origin = self.display_to_pixel_point(target_line_end, editor_snapshot, window)?;
|
||||
|
||||
let start_point = content_origin - point(scroll_pixel_position.x, Pixels::ZERO);
|
||||
let start_point = content_origin - point(scroll_pixel_position.x.into(), Pixels::ZERO);
|
||||
let mut origin = start_point
|
||||
+ line_origin
|
||||
+ point(Self::EDIT_PREDICTION_POPOVER_PADDING_X, Pixels::ZERO);
|
||||
@@ -9028,7 +9026,8 @@ impl Editor {
|
||||
visible_row_range: Range<DisplayRow>,
|
||||
line_layouts: &[LineWithInvisibles],
|
||||
line_height: Pixels,
|
||||
scroll_pixel_position: gpui::Point<Pixels>,
|
||||
scroll_position: gpui::Point<ScrollOffset>,
|
||||
scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
|
||||
newest_selection_head: Option<DisplayPoint>,
|
||||
editor_width: Pixels,
|
||||
style: &EditorStyle,
|
||||
@@ -9141,9 +9140,11 @@ impl Editor {
|
||||
..Default::default()
|
||||
});
|
||||
|
||||
let x_after_longest =
|
||||
text_bounds.origin.x + longest_line_width + Self::EDIT_PREDICTION_POPOVER_PADDING_X
|
||||
- scroll_pixel_position.x;
|
||||
let x_after_longest = Pixels::from(
|
||||
ScrollPixelOffset::from(
|
||||
text_bounds.origin.x + longest_line_width + Self::EDIT_PREDICTION_POPOVER_PADDING_X,
|
||||
) - scroll_pixel_position.x,
|
||||
);
|
||||
|
||||
let element_bounds = element.layout_as_root(AvailableSpace::min_size(), window, cx);
|
||||
|
||||
@@ -9155,8 +9156,11 @@ impl Editor {
|
||||
let mut origin = if can_position_to_the_right {
|
||||
point(
|
||||
x_after_longest,
|
||||
text_bounds.origin.y + edit_start.row().as_f32() * line_height
|
||||
- scroll_pixel_position.y,
|
||||
text_bounds.origin.y
|
||||
+ Pixels::from(
|
||||
edit_start.row().as_f64() * ScrollPixelOffset::from(line_height)
|
||||
- scroll_pixel_position.y,
|
||||
),
|
||||
)
|
||||
} else {
|
||||
let cursor_row = newest_selection_head.map(|head| head.row());
|
||||
@@ -9186,8 +9190,10 @@ impl Editor {
|
||||
|
||||
content_origin
|
||||
+ point(
|
||||
-scroll_pixel_position.x,
|
||||
row_target.as_f32() * line_height - scroll_pixel_position.y,
|
||||
Pixels::from(-scroll_pixel_position.x),
|
||||
Pixels::from(
|
||||
(row_target.as_f64() - scroll_position.y) * f64::from(line_height),
|
||||
),
|
||||
)
|
||||
};
|
||||
|
||||
@@ -12886,7 +12892,7 @@ impl Editor {
|
||||
self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx);
|
||||
self.change_selections(Default::default(), window, cx, |s| {
|
||||
s.move_heads_with(|map, head, _| (movement::right(map, head), SelectionGoal::None));
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
pub fn move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context<Self>) {
|
||||
@@ -14255,7 +14261,7 @@ impl Editor {
|
||||
let mut row = range.start.row();
|
||||
let positions =
|
||||
if let SelectionGoal::HorizontalRange { start, end } = selection.goal {
|
||||
px(start)..px(end)
|
||||
Pixels::from(start)..Pixels::from(end)
|
||||
} else {
|
||||
let start_x =
|
||||
display_map.x_for_display_point(range.start, &text_layout_details);
|
||||
@@ -15175,12 +15181,8 @@ impl Editor {
|
||||
}
|
||||
|
||||
let mut new_range = old_range.clone();
|
||||
while let Some((node, containing_range)) = buffer.syntax_ancestor(new_range.clone())
|
||||
{
|
||||
new_range = match containing_range {
|
||||
MultiOrSingleBufferOffsetRange::Single(_) => break,
|
||||
MultiOrSingleBufferOffsetRange::Multi(range) => range,
|
||||
};
|
||||
while let Some((node, range)) = buffer.syntax_ancestor(new_range.clone()) {
|
||||
new_range = range;
|
||||
if !node.is_named() {
|
||||
continue;
|
||||
}
|
||||
@@ -15310,20 +15312,14 @@ impl Editor {
|
||||
&& let Some((_, ancestor_range)) =
|
||||
buffer.syntax_ancestor(selection.start..selection.end)
|
||||
{
|
||||
match ancestor_range {
|
||||
MultiOrSingleBufferOffsetRange::Single(range) => range,
|
||||
MultiOrSingleBufferOffsetRange::Multi(range) => range,
|
||||
}
|
||||
ancestor_range
|
||||
} else {
|
||||
selection.range()
|
||||
};
|
||||
|
||||
let mut parent = child.clone();
|
||||
while let Some((_, ancestor_range)) = buffer.syntax_ancestor(parent.clone()) {
|
||||
parent = match ancestor_range {
|
||||
MultiOrSingleBufferOffsetRange::Single(range) => range,
|
||||
MultiOrSingleBufferOffsetRange::Multi(range) => range,
|
||||
};
|
||||
parent = ancestor_range;
|
||||
if parent.start < child.start || parent.end > child.end {
|
||||
break;
|
||||
}
|
||||
@@ -15883,7 +15879,7 @@ impl Editor {
|
||||
let snapshot = multi_buffer.snapshot(cx);
|
||||
if let Some(buffer_id) = snapshot.buffer_id_for_excerpt(excerpt)
|
||||
&& let Some(buffer) = multi_buffer.buffer(buffer_id)
|
||||
&& let Some(excerpt_range) = snapshot.buffer_range_for_excerpt(excerpt)
|
||||
&& let Some(excerpt_range) = snapshot.context_range_for_excerpt(excerpt)
|
||||
{
|
||||
let buffer_snapshot = buffer.read(cx).snapshot();
|
||||
let excerpt_end_row = Point::from_anchor(&excerpt_range.end, &buffer_snapshot).row;
|
||||
@@ -15899,7 +15895,7 @@ impl Editor {
|
||||
|
||||
if should_scroll_up {
|
||||
let new_scroll_position =
|
||||
current_scroll_position + gpui::Point::new(0.0, lines_to_expand as f32);
|
||||
current_scroll_position + gpui::Point::new(0.0, lines_to_expand as ScrollOffset);
|
||||
self.set_scroll_position(new_scroll_position, window, cx);
|
||||
}
|
||||
}
|
||||
@@ -15980,11 +15976,11 @@ impl Editor {
|
||||
active_group_id = Some(active_group.group_id);
|
||||
}
|
||||
|
||||
fn filtered(
|
||||
fn filtered<'a>(
|
||||
snapshot: EditorSnapshot,
|
||||
severity: GoToDiagnosticSeverityFilter,
|
||||
diagnostics: impl Iterator<Item = DiagnosticEntry<usize>>,
|
||||
) -> impl Iterator<Item = DiagnosticEntry<usize>> {
|
||||
diagnostics: impl Iterator<Item = DiagnosticEntryRef<'a, usize>>,
|
||||
) -> impl Iterator<Item = DiagnosticEntryRef<'a, usize>> {
|
||||
diagnostics
|
||||
.filter(move |entry| severity.matches(entry.diagnostic.severity))
|
||||
.filter(|entry| entry.range.start != entry.range.end)
|
||||
@@ -16008,7 +16004,7 @@ impl Editor {
|
||||
.filter(|entry| entry.range.start >= selection.start),
|
||||
);
|
||||
|
||||
let mut found: Option<DiagnosticEntry<usize>> = None;
|
||||
let mut found: Option<DiagnosticEntryRef<usize>> = None;
|
||||
if direction == Direction::Prev {
|
||||
'outer: for prev_diagnostics in [before.collect::<Vec<_>>(), after.collect::<Vec<_>>()]
|
||||
{
|
||||
@@ -16918,7 +16914,8 @@ impl Editor {
|
||||
let item_id = item.item_id();
|
||||
|
||||
if split {
|
||||
workspace.split_item(SplitDirection::Right, item, window, cx);
|
||||
let pane = workspace.adjacent_pane(window, cx);
|
||||
workspace.add_item(pane, item, None, true, true, window, cx);
|
||||
} else if PreviewTabsSettings::get_global(cx).enable_preview_from_code_navigation {
|
||||
let (preview_item_id, preview_item_idx) =
|
||||
workspace.active_pane().read_with(cx, |pane, _| {
|
||||
@@ -17530,7 +17527,7 @@ impl Editor {
|
||||
fn activate_diagnostics(
|
||||
&mut self,
|
||||
buffer_id: BufferId,
|
||||
diagnostic: DiagnosticEntry<usize>,
|
||||
diagnostic: DiagnosticEntryRef<'_, usize>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
@@ -17719,7 +17716,7 @@ impl Editor {
|
||||
.map(|(line, _)| line)
|
||||
.map(SharedString::new)
|
||||
.unwrap_or_else(|| {
|
||||
SharedString::from(diagnostic_entry.diagnostic.message)
|
||||
SharedString::new(&*diagnostic_entry.diagnostic.message)
|
||||
});
|
||||
let start_anchor = snapshot.anchor_before(diagnostic_entry.range.start);
|
||||
let (Ok(i) | Err(i)) = inline_diagnostics
|
||||
@@ -18129,6 +18126,13 @@ impl Editor {
|
||||
let mut to_fold = Vec::new();
|
||||
let mut stack = vec![(0, snapshot.max_row().0, 1)];
|
||||
|
||||
let row_ranges_to_keep: Vec<Range<u32>> = self
|
||||
.selections
|
||||
.all::<Point>(cx)
|
||||
.into_iter()
|
||||
.map(|sel| sel.start.row..sel.end.row)
|
||||
.collect();
|
||||
|
||||
while let Some((mut start_row, end_row, current_level)) = stack.pop() {
|
||||
while start_row < end_row {
|
||||
match self
|
||||
@@ -18142,7 +18146,13 @@ impl Editor {
|
||||
if current_level < fold_at_level {
|
||||
stack.push((nested_start_row, nested_end_row, current_level + 1));
|
||||
} else if current_level == fold_at_level {
|
||||
to_fold.push(crease);
|
||||
// Fold iff there is no selection completely contained within the fold region
|
||||
if !row_ranges_to_keep.iter().any(|selection| {
|
||||
selection.end >= nested_start_row
|
||||
&& selection.start <= nested_end_row
|
||||
}) {
|
||||
to_fold.push(crease);
|
||||
}
|
||||
}
|
||||
|
||||
start_row = nested_end_row + 1;
|
||||
@@ -18849,7 +18859,12 @@ impl Editor {
|
||||
) {
|
||||
self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
|
||||
let snapshot = self.snapshot(window, cx);
|
||||
let hunks = snapshot.hunks_for_ranges(self.selections.ranges(cx));
|
||||
let hunks = snapshot.hunks_for_ranges(
|
||||
self.selections
|
||||
.all(cx)
|
||||
.into_iter()
|
||||
.map(|selection| selection.range()),
|
||||
);
|
||||
let mut ranges_by_buffer = HashMap::default();
|
||||
self.transact(window, cx, |editor, _window, cx| {
|
||||
for hunk in hunks {
|
||||
@@ -21751,11 +21766,11 @@ impl Editor {
|
||||
.scroll_position(editor_snapshot)
|
||||
.y;
|
||||
|
||||
if source.row().as_f32() < scroll_top.floor() {
|
||||
if source.row().as_f64() < scroll_top.floor() {
|
||||
return None;
|
||||
}
|
||||
let source_x = editor_snapshot.x_for_display_point(source, &text_layout_details);
|
||||
let source_y = line_height * (source.row().as_f32() - scroll_top);
|
||||
let source_y = line_height * (source.row().as_f64() - scroll_top) as f32;
|
||||
Some(gpui::Point::new(source_x, source_y))
|
||||
}
|
||||
|
||||
@@ -22409,7 +22424,14 @@ fn wrap_with_prefix(
|
||||
continue;
|
||||
}
|
||||
if !preserve_existing_whitespace {
|
||||
token = " ";
|
||||
// Keep a single whitespace grapheme as-is
|
||||
if let Some(first) =
|
||||
unicode_segmentation::UnicodeSegmentation::graphemes(token, true).next()
|
||||
{
|
||||
token = first;
|
||||
} else {
|
||||
token = " ";
|
||||
}
|
||||
grapheme_len = 1;
|
||||
}
|
||||
let current_prefix_len = if is_first_line {
|
||||
@@ -22511,6 +22533,17 @@ fn test_wrap_with_prefix() {
|
||||
),
|
||||
"这是什\n么 钢\n笔"
|
||||
);
|
||||
assert_eq!(
|
||||
wrap_with_prefix(
|
||||
String::new(),
|
||||
String::new(),
|
||||
format!("foo{}bar", '\u{2009}'), // thin space
|
||||
80,
|
||||
NonZeroU32::new(4).unwrap(),
|
||||
false,
|
||||
),
|
||||
format!("foo{}bar", '\u{2009}')
|
||||
);
|
||||
}
|
||||
|
||||
pub trait CollaborationHub {
|
||||
@@ -23319,7 +23352,7 @@ impl EditorSnapshot {
|
||||
.map(|display_map| display_map.text())
|
||||
}
|
||||
|
||||
pub fn scroll_position(&self) -> gpui::Point<f32> {
|
||||
pub fn scroll_position(&self) -> gpui::Point<ScrollOffset> {
|
||||
self.scroll_anchor.scroll_position(&self.display_snapshot)
|
||||
}
|
||||
|
||||
@@ -23885,12 +23918,16 @@ impl EntityInputHandler for Editor {
|
||||
|
||||
let snapshot = self.snapshot(window, cx);
|
||||
let scroll_position = snapshot.scroll_position();
|
||||
let scroll_left = scroll_position.x * em_advance;
|
||||
let scroll_left = scroll_position.x * ScrollOffset::from(em_advance);
|
||||
|
||||
let start = OffsetUtf16(range_utf16.start).to_display_point(&snapshot);
|
||||
let x = snapshot.x_for_display_point(start, &text_layout_details) - scroll_left
|
||||
+ self.gutter_dimensions.full_width();
|
||||
let y = line_height * (start.row().as_f32() - scroll_position.y);
|
||||
let x = Pixels::from(
|
||||
ScrollOffset::from(
|
||||
snapshot.x_for_display_point(start, &text_layout_details)
|
||||
+ self.gutter_dimensions.full_width(),
|
||||
) - scroll_left,
|
||||
);
|
||||
let y = line_height * (start.row().as_f64() - scroll_position.y) as f32;
|
||||
|
||||
Some(Bounds {
|
||||
origin: element_bounds.origin + point(x, y),
|
||||
@@ -24157,7 +24194,7 @@ impl<T: ToOffset> RangeToAnchorExt for Range<T> {
|
||||
}
|
||||
|
||||
pub trait RowExt {
|
||||
fn as_f32(&self) -> f32;
|
||||
fn as_f64(&self) -> f64;
|
||||
|
||||
fn next_row(&self) -> Self;
|
||||
|
||||
@@ -24167,8 +24204,8 @@ pub trait RowExt {
|
||||
}
|
||||
|
||||
impl RowExt for DisplayRow {
|
||||
fn as_f32(&self) -> f32 {
|
||||
self.0 as f32
|
||||
fn as_f64(&self) -> f64 {
|
||||
self.0 as _
|
||||
}
|
||||
|
||||
fn next_row(&self) -> Self {
|
||||
@@ -24185,8 +24222,8 @@ impl RowExt for DisplayRow {
|
||||
}
|
||||
|
||||
impl RowExt for MultiBufferRow {
|
||||
fn as_f32(&self) -> f32 {
|
||||
self.0 as f32
|
||||
fn as_f64(&self) -> f64 {
|
||||
self.0 as _
|
||||
}
|
||||
|
||||
fn next_row(&self) -> Self {
|
||||
@@ -24457,8 +24494,8 @@ fn all_edits_insertions_or_deletions(
|
||||
struct MissingEditPredictionKeybindingTooltip;
|
||||
|
||||
impl Render for MissingEditPredictionKeybindingTooltip {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
ui::tooltip_container(window, cx, |container, _, cx| {
|
||||
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
ui::tooltip_container(cx, |container, cx| {
|
||||
container
|
||||
.flex_shrink_0()
|
||||
.max_w_80()
|
||||
|
||||
@@ -25,13 +25,12 @@ pub struct EditorSettings {
|
||||
pub lsp_highlight_debounce: u64,
|
||||
pub hover_popover_enabled: bool,
|
||||
pub hover_popover_delay: u64,
|
||||
pub status_bar: StatusBar,
|
||||
pub toolbar: Toolbar,
|
||||
pub scrollbar: Scrollbar,
|
||||
pub minimap: Minimap,
|
||||
pub gutter: Gutter,
|
||||
pub scroll_beyond_last_line: ScrollBeyondLastLine,
|
||||
pub vertical_scroll_margin: f32,
|
||||
pub vertical_scroll_margin: f64,
|
||||
pub autoscroll_on_clicks: bool,
|
||||
pub horizontal_scroll_margin: f32,
|
||||
pub scroll_sensitivity: f32,
|
||||
@@ -67,18 +66,6 @@ pub struct Jupyter {
|
||||
pub enabled: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct StatusBar {
|
||||
/// Whether to display the active language button in the status bar.
|
||||
///
|
||||
/// Default: true
|
||||
pub active_language_button: bool,
|
||||
/// Whether to show the cursor position button in the status bar.
|
||||
///
|
||||
/// Default: true
|
||||
pub cursor_position_button: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct Toolbar {
|
||||
pub breadcrumbs: bool,
|
||||
@@ -195,7 +182,6 @@ impl Settings for EditorSettings {
|
||||
let minimap = editor.minimap.unwrap();
|
||||
let gutter = editor.gutter.unwrap();
|
||||
let axes = scrollbar.axes.unwrap();
|
||||
let status_bar = editor.status_bar.unwrap();
|
||||
let toolbar = editor.toolbar.unwrap();
|
||||
let search = editor.search.unwrap();
|
||||
let drag_and_drop_selection = editor.drag_and_drop_selection.unwrap();
|
||||
@@ -208,10 +194,6 @@ impl Settings for EditorSettings {
|
||||
lsp_highlight_debounce: editor.lsp_highlight_debounce.unwrap(),
|
||||
hover_popover_enabled: editor.hover_popover_enabled.unwrap(),
|
||||
hover_popover_delay: editor.hover_popover_delay.unwrap(),
|
||||
status_bar: StatusBar {
|
||||
active_language_button: status_bar.active_language_button.unwrap(),
|
||||
cursor_position_button: status_bar.cursor_position_button.unwrap(),
|
||||
},
|
||||
toolbar: Toolbar {
|
||||
breadcrumbs: toolbar.breadcrumbs.unwrap(),
|
||||
quick_actions: toolbar.quick_actions.unwrap(),
|
||||
@@ -248,7 +230,7 @@ impl Settings for EditorSettings {
|
||||
folds: gutter.folds.unwrap(),
|
||||
},
|
||||
scroll_beyond_last_line: editor.scroll_beyond_last_line.unwrap(),
|
||||
vertical_scroll_margin: editor.vertical_scroll_margin.unwrap(),
|
||||
vertical_scroll_margin: editor.vertical_scroll_margin.unwrap() as f64,
|
||||
autoscroll_on_clicks: editor.autoscroll_on_clicks.unwrap(),
|
||||
horizontal_scroll_margin: editor.horizontal_scroll_margin.unwrap(),
|
||||
scroll_sensitivity: editor.scroll_sensitivity.unwrap(),
|
||||
|
||||
@@ -5,8 +5,7 @@ use project::project_settings::ProjectSettings;
|
||||
use settings::{EditableSettingControl, Settings, SettingsContent};
|
||||
use theme::{FontFamilyCache, FontFamilyName, ThemeSettings};
|
||||
use ui::{
|
||||
CheckboxWithLabel, ContextMenu, DropdownMenu, NumericStepper, SettingsContainer, SettingsGroup,
|
||||
prelude::*,
|
||||
CheckboxWithLabel, ContextMenu, DropdownMenu, SettingsContainer, SettingsGroup, prelude::*,
|
||||
};
|
||||
|
||||
use crate::EditorSettings;
|
||||
@@ -129,21 +128,12 @@ impl EditableSettingControl for BufferFontSizeControl {
|
||||
|
||||
impl RenderOnce for BufferFontSizeControl {
|
||||
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
|
||||
let value = Self::read(cx);
|
||||
let _value = Self::read(cx);
|
||||
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(Icon::new(IconName::FontSize))
|
||||
.child(NumericStepper::new(
|
||||
"buffer-font-size",
|
||||
value.to_string(),
|
||||
move |_, _, cx| {
|
||||
Self::write(value - px(1.), cx);
|
||||
},
|
||||
move |_, _, cx| {
|
||||
Self::write(value + px(1.), cx);
|
||||
},
|
||||
))
|
||||
.child(div()) // TODO: Re-evaluate this whole crate once settings UI is complete
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ use crate::{
|
||||
};
|
||||
use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind};
|
||||
use collections::HashMap;
|
||||
use futures::StreamExt;
|
||||
use futures::{StreamExt, channel::oneshot};
|
||||
use gpui::{
|
||||
BackgroundExecutor, DismissEvent, Rgba, SemanticVersion, TestAppContext, UpdateGlobal,
|
||||
VisualTestContext, WindowBounds, WindowOptions, div,
|
||||
@@ -782,12 +782,12 @@ async fn test_navigation_history(cx: &mut TestAppContext) {
|
||||
assert!(pop_history(&mut editor, cx).is_none());
|
||||
|
||||
// Set scroll position to check later
|
||||
editor.set_scroll_position(gpui::Point::<f32>::new(5.5, 5.5), window, cx);
|
||||
editor.set_scroll_position(gpui::Point::<f64>::new(5.5, 5.5), window, cx);
|
||||
let original_scroll_position = editor.scroll_manager.anchor();
|
||||
|
||||
// Jump to the end of the document and adjust scroll
|
||||
editor.move_to_end(&MoveToEnd, window, cx);
|
||||
editor.set_scroll_position(gpui::Point::<f32>::new(-2.5, -0.5), window, cx);
|
||||
editor.set_scroll_position(gpui::Point::<f64>::new(-2.5, -0.5), window, cx);
|
||||
assert_ne!(editor.scroll_manager.anchor(), original_scroll_position);
|
||||
|
||||
let nav_entry = pop_history(&mut editor, cx).unwrap();
|
||||
@@ -817,7 +817,7 @@ async fn test_navigation_history(cx: &mut TestAppContext) {
|
||||
);
|
||||
assert_eq!(
|
||||
editor.scroll_position(cx),
|
||||
gpui::Point::new(0., editor.max_point(cx).row().as_f32())
|
||||
gpui::Point::new(0., editor.max_point(cx).row().as_f64())
|
||||
);
|
||||
|
||||
editor
|
||||
@@ -1256,6 +1256,63 @@ fn test_fold_at_level(cx: &mut TestAppContext) {
|
||||
editor.display_text(cx),
|
||||
editor.buffer.read(cx).read(cx).text()
|
||||
);
|
||||
let (_, positions) = marked_text_ranges(
|
||||
&"
|
||||
class Foo:
|
||||
# Hello!
|
||||
|
||||
def a():
|
||||
print(1)
|
||||
|
||||
def b():
|
||||
p«riˇ»nt(2)
|
||||
|
||||
|
||||
class Bar:
|
||||
# World!
|
||||
|
||||
def a():
|
||||
«ˇprint(1)
|
||||
|
||||
def b():
|
||||
print(2)»
|
||||
|
||||
|
||||
"
|
||||
.unindent(),
|
||||
true,
|
||||
);
|
||||
|
||||
editor.change_selections(SelectionEffects::default(), window, cx, |s| {
|
||||
s.select_ranges(positions)
|
||||
});
|
||||
|
||||
editor.fold_at_level(&FoldAtLevel(2), window, cx);
|
||||
assert_eq!(
|
||||
editor.display_text(cx),
|
||||
"
|
||||
class Foo:
|
||||
# Hello!
|
||||
|
||||
def a():⋯
|
||||
|
||||
def b():
|
||||
print(2)
|
||||
|
||||
|
||||
class Bar:
|
||||
# World!
|
||||
|
||||
def a():
|
||||
print(1)
|
||||
|
||||
def b():
|
||||
print(2)
|
||||
|
||||
|
||||
"
|
||||
.unindent(),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -9161,6 +9218,64 @@ async fn test_unwrap_syntax_nodes(cx: &mut gpui::TestAppContext) {
|
||||
});
|
||||
|
||||
cx.assert_editor_state(indoc! { r#"use mod1::{mod2::«mod3ˇ», mod5::«mod7ˇ»};"# });
|
||||
|
||||
cx.set_state(indoc! { r#"fn a() {
|
||||
// what
|
||||
// a
|
||||
// ˇlong
|
||||
// method
|
||||
// I
|
||||
// sure
|
||||
// hope
|
||||
// it
|
||||
// works
|
||||
}"# });
|
||||
|
||||
let buffer = cx.update_multibuffer(|multibuffer, _| multibuffer.as_singleton().unwrap());
|
||||
let multi_buffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
|
||||
cx.update(|_, cx| {
|
||||
multi_buffer.update(cx, |multi_buffer, cx| {
|
||||
multi_buffer.set_excerpts_for_path(
|
||||
PathKey::for_buffer(&buffer, cx),
|
||||
buffer,
|
||||
[Point::new(1, 0)..Point::new(1, 0)],
|
||||
3,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
let editor2 = cx.new_window_entity(|window, cx| {
|
||||
Editor::new(EditorMode::full(), multi_buffer, None, window, cx)
|
||||
});
|
||||
|
||||
let mut cx = EditorTestContext::for_editor_in(editor2, &mut cx).await;
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.change_selections(SelectionEffects::default(), window, cx, |s| {
|
||||
s.select_ranges([Point::new(3, 0)..Point::new(3, 0)]);
|
||||
})
|
||||
});
|
||||
|
||||
cx.assert_editor_state(indoc! { "
|
||||
fn a() {
|
||||
// what
|
||||
// a
|
||||
ˇ // long
|
||||
// method"});
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.unwrap_syntax_node(&UnwrapSyntaxNode, window, cx);
|
||||
});
|
||||
|
||||
// Although we could potentially make the action work when the syntax node
|
||||
// is half-hidden, it seems a bit dangerous as you can't easily tell what it
|
||||
// did. Maybe we could also expand the excerpt to contain the range?
|
||||
cx.assert_editor_state(indoc! { "
|
||||
fn a() {
|
||||
// what
|
||||
// a
|
||||
ˇ // long
|
||||
// method"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -11816,14 +11931,8 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) {
|
||||
settings.defaults.remove_trailing_whitespace_on_save = Some(true);
|
||||
settings.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Vec(vec![
|
||||
Formatter::LanguageServer { name: None },
|
||||
Formatter::CodeActions(
|
||||
[
|
||||
("code-action-1".into(), true),
|
||||
("code-action-2".into(), true),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
),
|
||||
Formatter::CodeAction("code-action-1".into()),
|
||||
Formatter::CodeAction("code-action-2".into()),
|
||||
])))
|
||||
});
|
||||
|
||||
@@ -11876,17 +11985,16 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) {
|
||||
);
|
||||
fake_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
|
||||
move |params, _| async move {
|
||||
assert_eq!(
|
||||
params.context.only,
|
||||
Some(vec!["code-action-1".into(), "code-action-2".into()])
|
||||
);
|
||||
let requested_code_actions = params.context.only.expect("Expected code action request");
|
||||
assert_eq!(requested_code_actions.len(), 1);
|
||||
|
||||
let uri = lsp::Uri::from_file_path(path!("/file.rs")).unwrap();
|
||||
Ok(Some(vec![
|
||||
lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
|
||||
let code_action = match requested_code_actions[0].as_str() {
|
||||
"code-action-1" => lsp::CodeAction {
|
||||
kind: Some("code-action-1".into()),
|
||||
edit: Some(lsp::WorkspaceEdit::new(
|
||||
[(
|
||||
uri.clone(),
|
||||
uri,
|
||||
vec![lsp::TextEdit::new(
|
||||
lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
|
||||
"applied-code-action-1-edit\n".to_string(),
|
||||
@@ -11900,8 +12008,8 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) {
|
||||
..Default::default()
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
|
||||
},
|
||||
"code-action-2" => lsp::CodeAction {
|
||||
kind: Some("code-action-2".into()),
|
||||
edit: Some(lsp::WorkspaceEdit::new(
|
||||
[(
|
||||
@@ -11915,8 +12023,12 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) {
|
||||
.collect(),
|
||||
)),
|
||||
..Default::default()
|
||||
}),
|
||||
]))
|
||||
},
|
||||
req => panic!("Unexpected code action request: {:?}", req),
|
||||
};
|
||||
Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
|
||||
code_action,
|
||||
)]))
|
||||
},
|
||||
);
|
||||
|
||||
@@ -16403,7 +16515,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) {
|
||||
leader.update(cx, |leader, cx| {
|
||||
leader.buffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::namespaced(1, "b.txt".into()),
|
||||
PathKey::namespaced(1, rel_path("b.txt").into_arc()),
|
||||
buffer_1.clone(),
|
||||
vec![
|
||||
Point::row_range(0..3),
|
||||
@@ -16414,7 +16526,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) {
|
||||
cx,
|
||||
);
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::namespaced(1, "a.txt".into()),
|
||||
PathKey::namespaced(1, rel_path("a.txt").into_arc()),
|
||||
buffer_2.clone(),
|
||||
vec![Point::row_range(0..6), Point::row_range(8..12)],
|
||||
0,
|
||||
@@ -20913,10 +21025,7 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) {
|
||||
for buffer in &buffers {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::namespaced(
|
||||
0,
|
||||
buffer.read(cx).file().unwrap().path().as_unix_str().into(),
|
||||
),
|
||||
PathKey::namespaced(0, buffer.read(cx).file().unwrap().path().clone()),
|
||||
buffer.clone(),
|
||||
vec![text::Anchor::MIN.to_point(&snapshot)..text::Anchor::MAX.to_point(&snapshot)],
|
||||
2,
|
||||
@@ -26241,6 +26350,118 @@ async fn test_paste_url_from_other_app_creates_markdown_link_selectively_in_mult
|
||||
));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_race_in_multibuffer_save(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/project"),
|
||||
json!({
|
||||
"first.rs": "# First Document\nSome content here.",
|
||||
"second.rs": "Plain text content for second file.",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, [path!("/project").as_ref()], cx).await;
|
||||
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let cx = &mut VisualTestContext::from_window(*workspace, cx);
|
||||
|
||||
let language = rust_lang();
|
||||
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
|
||||
language_registry.add(language.clone());
|
||||
let mut fake_servers = language_registry.register_fake_lsp(
|
||||
"Rust",
|
||||
FakeLspAdapter {
|
||||
..FakeLspAdapter::default()
|
||||
},
|
||||
);
|
||||
|
||||
let buffer1 = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(PathBuf::from(path!("/project/first.rs")), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let buffer2 = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(PathBuf::from(path!("/project/second.rs")), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let multi_buffer = cx.new(|cx| {
|
||||
let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite);
|
||||
multi_buffer.set_excerpts_for_path(
|
||||
PathKey::for_buffer(&buffer1, cx),
|
||||
buffer1.clone(),
|
||||
[Point::zero()..buffer1.read(cx).max_point()],
|
||||
3,
|
||||
cx,
|
||||
);
|
||||
multi_buffer.set_excerpts_for_path(
|
||||
PathKey::for_buffer(&buffer2, cx),
|
||||
buffer2.clone(),
|
||||
[Point::zero()..buffer1.read(cx).max_point()],
|
||||
3,
|
||||
cx,
|
||||
);
|
||||
multi_buffer
|
||||
});
|
||||
|
||||
let (editor, cx) = cx.add_window_view(|window, cx| {
|
||||
Editor::new(
|
||||
EditorMode::full(),
|
||||
multi_buffer,
|
||||
Some(project.clone()),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let fake_language_server = fake_servers.next().await.unwrap();
|
||||
|
||||
buffer1.update(cx, |buffer, cx| buffer.edit([(0..0, "hello!")], None, cx));
|
||||
|
||||
let save = editor.update_in(cx, |editor, window, cx| {
|
||||
assert!(editor.is_dirty(cx));
|
||||
|
||||
editor.save(
|
||||
SaveOptions {
|
||||
format: true,
|
||||
autosave: true,
|
||||
},
|
||||
project,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let (start_edit_tx, start_edit_rx) = oneshot::channel();
|
||||
let (done_edit_tx, done_edit_rx) = oneshot::channel();
|
||||
let mut done_edit_rx = Some(done_edit_rx);
|
||||
let mut start_edit_tx = Some(start_edit_tx);
|
||||
|
||||
fake_language_server.set_request_handler::<lsp::request::Formatting, _, _>(move |_, _| {
|
||||
start_edit_tx.take().unwrap().send(()).unwrap();
|
||||
let done_edit_rx = done_edit_rx.take().unwrap();
|
||||
async move {
|
||||
done_edit_rx.await.unwrap();
|
||||
Ok(None)
|
||||
}
|
||||
});
|
||||
|
||||
start_edit_rx.await.unwrap();
|
||||
buffer2
|
||||
.update(cx, |buffer, cx| buffer.edit([(0..0, "world!")], None, cx))
|
||||
.unwrap();
|
||||
|
||||
done_edit_tx.send(()).unwrap();
|
||||
|
||||
save.await.unwrap();
|
||||
cx.update(|_, cx| assert!(editor.is_dirty(cx)));
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn extract_color_inlays(editor: &Editor, cx: &App) -> Vec<Rgba> {
|
||||
editor
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -95,6 +95,7 @@ pub trait BlameRenderer {
|
||||
_: Entity<Editor>,
|
||||
_: usize,
|
||||
_: Hsla,
|
||||
window: &mut Window,
|
||||
_: &mut App,
|
||||
) -> Option<AnyElement>;
|
||||
|
||||
@@ -142,6 +143,7 @@ impl BlameRenderer for () {
|
||||
_: Entity<Editor>,
|
||||
_: usize,
|
||||
_: Hsla,
|
||||
_: &mut Window,
|
||||
_: &mut App,
|
||||
) -> Option<AnyElement> {
|
||||
None
|
||||
@@ -673,8 +675,8 @@ async fn parse_commit_messages(
|
||||
.as_ref()
|
||||
.map(|(provider, remote)| GitRemote {
|
||||
host: provider.clone(),
|
||||
owner: remote.owner.to_string(),
|
||||
repo: remote.repo.to_string(),
|
||||
owner: remote.owner.clone().into(),
|
||||
repo: remote.repo.clone().into(),
|
||||
});
|
||||
|
||||
let pull_request = parsed_remote_url
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use crate::{
|
||||
Anchor, Editor, EditorSettings, EditorSnapshot, FindAllReferences, GoToDefinition,
|
||||
GoToTypeDefinition, GotoDefinitionKind, InlayId, Navigated, PointForPosition, SelectPhase,
|
||||
GoToDefinitionSplit, GoToTypeDefinition, GoToTypeDefinitionSplit, GotoDefinitionKind, InlayId,
|
||||
Navigated, PointForPosition, SelectPhase,
|
||||
editor_settings::GoToDefinitionFallback,
|
||||
hover_popover::{self, InlayHover},
|
||||
scroll::ScrollAmount,
|
||||
@@ -266,10 +267,13 @@ impl Editor {
|
||||
);
|
||||
|
||||
let navigate_task = if point.as_valid().is_some() {
|
||||
if modifiers.shift {
|
||||
self.go_to_type_definition(&GoToTypeDefinition, window, cx)
|
||||
} else {
|
||||
self.go_to_definition(&GoToDefinition, window, cx)
|
||||
match (modifiers.shift, modifiers.alt) {
|
||||
(true, true) => {
|
||||
self.go_to_type_definition_split(&GoToTypeDefinitionSplit, window, cx)
|
||||
}
|
||||
(true, false) => self.go_to_type_definition(&GoToTypeDefinition, window, cx),
|
||||
(false, true) => self.go_to_definition_split(&GoToDefinitionSplit, window, cx),
|
||||
(false, false) => self.go_to_definition(&GoToDefinition, window, cx),
|
||||
}
|
||||
} else {
|
||||
Task::ready(Ok(Navigated::No))
|
||||
@@ -297,14 +301,10 @@ pub fn update_inlay_link_and_hover_points(
|
||||
let mut hover_updated = false;
|
||||
if let Some(hovered_offset) = hovered_offset {
|
||||
let buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let previous_valid_anchor = buffer_snapshot.anchor_at(
|
||||
point_for_position.previous_valid.to_point(snapshot),
|
||||
Bias::Left,
|
||||
);
|
||||
let next_valid_anchor = buffer_snapshot.anchor_at(
|
||||
point_for_position.next_valid.to_point(snapshot),
|
||||
Bias::Right,
|
||||
);
|
||||
let previous_valid_anchor =
|
||||
buffer_snapshot.anchor_before(point_for_position.previous_valid.to_point(snapshot));
|
||||
let next_valid_anchor =
|
||||
buffer_snapshot.anchor_after(point_for_position.next_valid.to_point(snapshot));
|
||||
if let Some(hovered_hint) = editor
|
||||
.visible_inlay_hints(cx)
|
||||
.into_iter()
|
||||
@@ -1396,7 +1396,7 @@ mod tests {
|
||||
let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
|
||||
let expected_highlight = InlayHighlight {
|
||||
inlay: InlayId::Hint(0),
|
||||
inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right),
|
||||
inlay_position: buffer_snapshot.anchor_after(inlay_range.start),
|
||||
range: 0..hint_label.len(),
|
||||
};
|
||||
assert_set_eq!(actual_highlights, vec![&expected_highlight]);
|
||||
|
||||
@@ -16,7 +16,7 @@ use itertools::Itertools;
|
||||
use language::{DiagnosticEntry, Language, LanguageRegistry};
|
||||
use lsp::DiagnosticSeverity;
|
||||
use markdown::{Markdown, MarkdownElement, MarkdownStyle};
|
||||
use multi_buffer::{MultiOrSingleBufferOffsetRange, ToOffset, ToPoint};
|
||||
use multi_buffer::{ToOffset, ToPoint};
|
||||
use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart};
|
||||
use settings::Settings;
|
||||
use std::{borrow::Cow, cell::RefCell};
|
||||
@@ -371,7 +371,7 @@ fn show_hover(
|
||||
this.update(cx, |_, cx| cx.observe(&markdown, |_, _, cx| cx.notify()))?;
|
||||
|
||||
let local_diagnostic = DiagnosticEntry {
|
||||
diagnostic: local_diagnostic.diagnostic,
|
||||
diagnostic: local_diagnostic.diagnostic.to_owned(),
|
||||
range: snapshot
|
||||
.buffer_snapshot
|
||||
.anchor_before(local_diagnostic.range.start)
|
||||
@@ -477,13 +477,8 @@ fn show_hover(
|
||||
})
|
||||
.or_else(|| {
|
||||
let snapshot = &snapshot.buffer_snapshot;
|
||||
match snapshot.syntax_ancestor(anchor..anchor)?.1 {
|
||||
MultiOrSingleBufferOffsetRange::Multi(range) => Some(
|
||||
snapshot.anchor_before(range.start)
|
||||
..snapshot.anchor_after(range.end),
|
||||
),
|
||||
MultiOrSingleBufferOffsetRange::Single(_) => None,
|
||||
}
|
||||
let range = snapshot.syntax_ancestor(anchor..anchor)?.1;
|
||||
Some(snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end))
|
||||
})
|
||||
.unwrap_or_else(|| anchor..anchor);
|
||||
|
||||
@@ -1790,7 +1785,7 @@ mod tests {
|
||||
popover.symbol_range,
|
||||
RangeInEditor::Inlay(InlayHighlight {
|
||||
inlay: InlayId::Hint(0),
|
||||
inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right),
|
||||
inlay_position: buffer_snapshot.anchor_after(inlay_range.start),
|
||||
range: ": ".len()..": ".len() + new_type_label.len(),
|
||||
}),
|
||||
"Popover range should match the new type label part"
|
||||
@@ -1845,7 +1840,7 @@ mod tests {
|
||||
popover.symbol_range,
|
||||
RangeInEditor::Inlay(InlayHighlight {
|
||||
inlay: InlayId::Hint(0),
|
||||
inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right),
|
||||
inlay_position: buffer_snapshot.anchor_after(inlay_range.start),
|
||||
range: ": ".len() + new_type_label.len() + "<".len()
|
||||
..": ".len() + new_type_label.len() + "<".len() + struct_label.len(),
|
||||
}),
|
||||
|
||||
@@ -2251,7 +2251,7 @@ pub mod tests {
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
#[gpui::test(iterations = 4)]
|
||||
async fn test_large_buffer_inlay_requests_split(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.inlay_hints = Some(InlayHintSettingsContent {
|
||||
|
||||
@@ -5,7 +5,7 @@ use crate::{
|
||||
display_map::HighlightKey,
|
||||
editor_settings::SeedQuerySetting,
|
||||
persistence::{DB, SerializedEditor},
|
||||
scroll::ScrollAnchor,
|
||||
scroll::{ScrollAnchor, ScrollOffset},
|
||||
};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use collections::{HashMap, HashSet};
|
||||
@@ -578,12 +578,11 @@ fn deserialize_selection(
|
||||
|
||||
fn deserialize_anchor(buffer: &MultiBufferSnapshot, anchor: proto::EditorAnchor) -> Option<Anchor> {
|
||||
let excerpt_id = ExcerptId::from_proto(anchor.excerpt_id);
|
||||
Some(Anchor {
|
||||
Some(Anchor::in_buffer(
|
||||
excerpt_id,
|
||||
text_anchor: language::proto::deserialize_anchor(anchor.anchor?)?,
|
||||
buffer_id: buffer.buffer_id_for_excerpt(excerpt_id),
|
||||
diff_base_anchor: None,
|
||||
})
|
||||
buffer.buffer_id_for_excerpt(excerpt_id)?,
|
||||
language::proto::deserialize_anchor(anchor.anchor?)?,
|
||||
))
|
||||
}
|
||||
|
||||
impl Item for Editor {
|
||||
@@ -833,12 +832,11 @@ impl Item for Editor {
|
||||
|
||||
// let mut buffers_to_save =
|
||||
let buffers_to_save = if self.buffer.read(cx).is_singleton() && !options.autosave {
|
||||
buffers.clone()
|
||||
buffers
|
||||
} else {
|
||||
buffers
|
||||
.iter()
|
||||
.into_iter()
|
||||
.filter(|buffer| buffer.read(cx).is_dirty())
|
||||
.cloned()
|
||||
.collect()
|
||||
};
|
||||
|
||||
@@ -864,22 +862,6 @@ impl Item for Editor {
|
||||
.await?;
|
||||
}
|
||||
|
||||
// Notify about clean buffers for language server events
|
||||
let buffers_that_were_not_saved: Vec<_> = buffers
|
||||
.into_iter()
|
||||
.filter(|b| !buffers_to_save.contains(b))
|
||||
.collect();
|
||||
|
||||
for buffer in buffers_that_were_not_saved {
|
||||
buffer
|
||||
.update(cx, |buffer, cx| {
|
||||
let version = buffer.saved_version().clone();
|
||||
let mtime = buffer.saved_mtime();
|
||||
buffer.did_save(version, mtime, cx);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
@@ -1339,7 +1321,7 @@ struct EditorRestorationData {
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct RestorationData {
|
||||
pub scroll_position: (BufferRow, gpui::Point<f32>),
|
||||
pub scroll_position: (BufferRow, gpui::Point<ScrollOffset>),
|
||||
pub folds: Vec<Range<Point>>,
|
||||
pub selections: Vec<Range<Point>>,
|
||||
}
|
||||
@@ -1752,13 +1734,8 @@ impl SearchableItem for Editor {
|
||||
.anchor_after(search_range.start + match_range.start);
|
||||
let end = search_buffer
|
||||
.anchor_before(search_range.start + match_range.end);
|
||||
Anchor {
|
||||
diff_base_anchor: Some(start),
|
||||
..deleted_hunk_anchor
|
||||
}..Anchor {
|
||||
diff_base_anchor: Some(end),
|
||||
..deleted_hunk_anchor
|
||||
}
|
||||
deleted_hunk_anchor.with_diff_base_anchor(start)
|
||||
..deleted_hunk_anchor.with_diff_base_anchor(end)
|
||||
} else {
|
||||
let start = search_buffer
|
||||
.anchor_after(search_range.start + match_range.start);
|
||||
|
||||
@@ -54,7 +54,7 @@ impl MouseContextMenu {
|
||||
let content_origin = editor.last_bounds?.origin
|
||||
+ Point {
|
||||
x: editor.gutter_dimensions.width,
|
||||
y: Pixels(0.0),
|
||||
y: Pixels::ZERO,
|
||||
};
|
||||
let source_position = editor.to_pixel_point(source, &editor_snapshot, window)?;
|
||||
let menu_position = MenuPosition::PinnedToEditor {
|
||||
|
||||
@@ -2,7 +2,10 @@
|
||||
//! in editor given a given motion (e.g. it handles converting a "move left" command into coordinates in editor). It is exposed mostly for use by vim crate.
|
||||
|
||||
use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
|
||||
use crate::{DisplayRow, EditorStyle, ToOffset, ToPoint, scroll::ScrollAnchor};
|
||||
use crate::{
|
||||
DisplayRow, EditorStyle, ToOffset, ToPoint,
|
||||
scroll::{ScrollAnchor, ScrollOffset},
|
||||
};
|
||||
use gpui::{Pixels, WindowTextSystem};
|
||||
use language::{CharClassifier, Point};
|
||||
use multi_buffer::{MultiBufferRow, MultiBufferSnapshot};
|
||||
@@ -27,8 +30,8 @@ pub struct TextLayoutDetails {
|
||||
pub(crate) editor_style: EditorStyle,
|
||||
pub(crate) rem_size: Pixels,
|
||||
pub scroll_anchor: ScrollAnchor,
|
||||
pub visible_rows: Option<f32>,
|
||||
pub vertical_scroll_margin: f32,
|
||||
pub visible_rows: Option<f64>,
|
||||
pub vertical_scroll_margin: ScrollOffset,
|
||||
}
|
||||
|
||||
/// Returns a column to the left of the current point, wrapping
|
||||
@@ -1018,22 +1021,22 @@ mod tests {
|
||||
[
|
||||
Inlay::edit_prediction(
|
||||
post_inc(&mut id),
|
||||
buffer_snapshot.anchor_at(offset, Bias::Left),
|
||||
buffer_snapshot.anchor_before(offset),
|
||||
"test",
|
||||
),
|
||||
Inlay::edit_prediction(
|
||||
post_inc(&mut id),
|
||||
buffer_snapshot.anchor_at(offset, Bias::Right),
|
||||
buffer_snapshot.anchor_after(offset),
|
||||
"test",
|
||||
),
|
||||
Inlay::mock_hint(
|
||||
post_inc(&mut id),
|
||||
buffer_snapshot.anchor_at(offset, Bias::Left),
|
||||
buffer_snapshot.anchor_before(offset),
|
||||
"test",
|
||||
),
|
||||
Inlay::mock_hint(
|
||||
post_inc(&mut id),
|
||||
buffer_snapshot.anchor_at(offset, Bias::Right),
|
||||
buffer_snapshot.anchor_after(offset),
|
||||
"test",
|
||||
),
|
||||
]
|
||||
@@ -1220,13 +1223,13 @@ mod tests {
|
||||
up(
|
||||
&snapshot,
|
||||
DisplayPoint::new(DisplayRow(0), 2),
|
||||
SelectionGoal::HorizontalPosition(col_2_x.0),
|
||||
SelectionGoal::HorizontalPosition(f64::from(col_2_x)),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(0), 0),
|
||||
SelectionGoal::HorizontalPosition(col_2_x.0),
|
||||
SelectionGoal::HorizontalPosition(f64::from(col_2_x)),
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
@@ -1251,26 +1254,26 @@ mod tests {
|
||||
up(
|
||||
&snapshot,
|
||||
DisplayPoint::new(DisplayRow(1), 4),
|
||||
SelectionGoal::HorizontalPosition(col_4_x.0),
|
||||
SelectionGoal::HorizontalPosition(col_4_x.into()),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(0), 3),
|
||||
SelectionGoal::HorizontalPosition(col_4_x.0)
|
||||
SelectionGoal::HorizontalPosition(col_4_x.into())
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
down(
|
||||
&snapshot,
|
||||
DisplayPoint::new(DisplayRow(0), 3),
|
||||
SelectionGoal::HorizontalPosition(col_4_x.0),
|
||||
SelectionGoal::HorizontalPosition(col_4_x.into()),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(1), 4),
|
||||
SelectionGoal::HorizontalPosition(col_4_x.0)
|
||||
SelectionGoal::HorizontalPosition(col_4_x.into())
|
||||
),
|
||||
);
|
||||
|
||||
@@ -1282,26 +1285,26 @@ mod tests {
|
||||
up(
|
||||
&snapshot,
|
||||
DisplayPoint::new(DisplayRow(3), 5),
|
||||
SelectionGoal::HorizontalPosition(col_5_x.0),
|
||||
SelectionGoal::HorizontalPosition(col_5_x.into()),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(1), 4),
|
||||
SelectionGoal::HorizontalPosition(col_5_x.0)
|
||||
SelectionGoal::HorizontalPosition(col_5_x.into())
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
down(
|
||||
&snapshot,
|
||||
DisplayPoint::new(DisplayRow(1), 4),
|
||||
SelectionGoal::HorizontalPosition(col_5_x.0),
|
||||
SelectionGoal::HorizontalPosition(col_5_x.into()),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(3), 5),
|
||||
SelectionGoal::HorizontalPosition(col_5_x.0)
|
||||
SelectionGoal::HorizontalPosition(col_5_x.into())
|
||||
),
|
||||
);
|
||||
|
||||
@@ -1326,13 +1329,13 @@ mod tests {
|
||||
down(
|
||||
&snapshot,
|
||||
DisplayPoint::new(DisplayRow(4), 2),
|
||||
SelectionGoal::HorizontalPosition(max_point_x.0),
|
||||
SelectionGoal::HorizontalPosition(max_point_x.into()),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(4), 2),
|
||||
SelectionGoal::HorizontalPosition(max_point_x.0)
|
||||
SelectionGoal::HorizontalPosition(max_point_x.into())
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user