Compare commits
vim-set-ic...static-rel
199 commits
Commit SHAs:

f6946ad4e8, c9972c2972, afdc53fdb7, d2e5947cf3, b02b130b7c, 41ac6a8764, 963204c99d, f6f11eb544,
c1e917165d, a2a7bd139a, 4de13e06ec, e680dfb0a0, 31544d294d, 4e932297a4, b2f0b1b168, 94f1faffa7,
075104a529, c80d213227, fe9895d112, 24bc52a15a, a65a8bea43, ea60a7b172, a67a55d81a, 1a9f9ccc29,
6da5945cd2, 354cc65daa, 2c6a8634cc, 84ec865c44, 80727a03bf, e7339fbd42, db5b1a31b5, bc39ed2575,
1764337a5d, 3707102702, 5263f51432, 93cd10aaa8, 2c1cc01b81, 81ada92306, 4bd7ef8bad, d1e2a1f20c,
79a8986cb7, d2b91eb2bc, c26937a848, da82eec4cb, 2bfcd60b88, 9c7369f54d, 5160510ed0, ee557fb7ea,
f9919f9214, 0f0974f105, e317d98915, dada318be7, b53f9c8863, 5b0a2f1ab6, d5a4890142, cd61bfbd42,
469ecfbe13, 46b6adadf9, 1a9e9c5faa, eb64ca8758, 68e6d55596, bcd2d269e2, b32075cdcb, 21e75b8221,
978951b79a, 6b980ecad3, d9c7f44b0b, 55e68553a4, 9fe46dc8d2, aced13bc9f, 2859cbdba9, 4443f61c16,
f0f0beb42f, 6707ff3b50, 93770e8314, f8c617303a, e5f05a21ce, f499504b13, 504216cbbf, 3bf71c690f,
456ba32ea7, 9aeb617a89, fd8bae9b72, f71c9122ca, 8441aa49b2, 7b96e1cf1a, 86322a186f, 1b94d74dc3,
db825c1141, f3abd1dab5, 662ec9977f, 3ab5103de1, 39bd03b92d, 1fffcb99ba, e4f90b5da2, dc6fad9659,
64c289a9a2, a08897ff30, d359a814f8, 4c35274b6e, bf48a95344, 7c3a21f732, af630be7ca, dbd8efe129,
3afbe836a1, d8709f2107, df7bc8200d, 8575972a07, 40c417f9c3, 7c2cf86dd9, 126ed6fbdd, 6f4381b39d,
6fbbdb3512, 179fb21778, 6584fb23e3, d8698dffe3, bf44dc5ff5, d85b6a1544, 702e618bba, 1029d3c301,
97f552876c, 63c081d456, 6970ab2040, e42dfb4387, ec202a26c8, f17096879c, fb343a7743, a49b2d5bf8,
b5d57598b6, b9d9602074, cc19f66ee1, 62f90fec77, 86ebb1890d, dd5099ac28, c95b88d546, c217f6bd36,
3314de8175, 6b907bd102, 3cb933ddb1, cf5362ffd1, 74ac5ece6a, f107708de3, 4940e53d23, ab79fa440d,
c9b7df4113, f2df49764e, 77cc55656e, 1c85995ed7, d1543f75b6, fc0b249136, 01dbc68f82, e111acad33,
c61409e577, 1659fb81e7, dd6c653fe9, a13e84a108, 1cac3e3e40, 9abe5811a5, 97bd2846e9, e9244d50a7,
83e5a3033e, 94a4c0c352, 0f8693386a, ed269b4467, 34ddf5466f, a701388cb7, 29afc0412e, e65a9291ef,
a53faff412, 074cb88036, 67ebb1f795, ace617037f, 43061b6b16, e23e976e58, 0266a995aa, 9741e9ab8b,
3f31fc2874, 6c50fd6de9, df43a2d3b1, 35749e99e5, e965c43703, 14fc726cae, 4f95186b53, 33f44009de,
9d895c5ea7, 0811d48a7a, d8cafdf937, 95190a2034, 49335d54be, 624e448492, bf9dd6bbef, 6af385235d,
cc19387853, 5922f4adce, cac920d992, 773850f477, 9c60bc3837, fbb4dcf2b1, 2ccadc7f65
@@ -24,7 +24,7 @@ workspace-members = [
third-party = [
    { name = "reqwest", version = "0.11.27" },
    # build of remote_server should not include scap / its x11 dependency
    { name = "scap", git = "https://github.com/zed-industries/scap", rev = "808aa5c45b41e8f44729d02e38fd00a2fe2722e7" },
    { name = "zed-scap", git = "https://github.com/zed-industries/scap", rev = "4afea48c3b002197176fb19cd0f9b180dd36eaac", version = "0.0.8-zed" },
    # build of remote_server should not need to include on libalsa through rodio
    { name = "rodio", git = "https://github.com/RustAudio/rodio" },
]
@@ -37,8 +37,6 @@ workspace-members = [
    "zed_glsl",
    "zed_html",
    "zed_proto",
    "zed_ruff",
    "slash_commands_example",
    "zed_snippets",
    "zed_test_extension",
]
.github/actions/run_tests/action.yml (vendored, 2 changes)
@@ -20,4 +20,4 @@ runs:

  - name: Run tests
    shell: bash -euxo pipefail {0}
    run: cargo nextest run --workspace --no-fail-fast
    run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
.github/actions/run_tests_windows/action.yml (vendored, 2 changes)
@@ -24,4 +24,4 @@ runs:
    shell: powershell
    working-directory: ${{ inputs.working-directory }}
    run: |
      cargo nextest run --workspace --no-fail-fast
      cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
.github/workflows/ci.yml (vendored, 8 changes)
@@ -826,8 +826,9 @@ jobs:
    timeout-minutes: 120
    name: Create a Windows installer
    runs-on: [self-32vcpu-windows-2022]
    if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
    # if: (startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling'))
    if: |
      ( startsWith(github.ref, 'refs/tags/v')
        || contains(github.event.pull_request.labels.*.name, 'run-bundling') )
    needs: [windows_tests]
    env:
      AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
@@ -870,8 +871,7 @@ jobs:

    - name: Upload Artifacts to release
      uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
      # Re-enable when we are ready to publish windows preview releases
      if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) && env.RELEASE_CHANNEL == 'preview' }} # upload only preview
      if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
      with:
        draft: true
        prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
@@ -39,7 +39,7 @@ jobs:
      content: ${{ steps.get-content.outputs.string }}

  send_release_notes_email:
    if: github.repository_owner == 'zed-industries' && !github.event.release.prerelease
    if: false && github.repository_owner == 'zed-industries' && !github.event.release.prerelease
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
.github/workflows/issue_response.yml (vendored, 33 changes)
@@ -1,33 +0,0 @@
name: Issue Response

on:
  schedule:
    - cron: "0 12 * * 2"
  workflow_dispatch:

jobs:
  issue-response:
    if: github.repository_owner == 'zed-industries'
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

      - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
        with:
          version: 9

      - name: Setup Node
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
        with:
          node-version: "20"
          cache: "pnpm"
          cache-dependency-path: "script/issue_response/pnpm-lock.yaml"

      - run: pnpm install --dir script/issue_response

      - name: Run Issue Response
        run: pnpm run --dir script/issue_response start
        env:
          ISSUE_RESPONSE_GITHUB_TOKEN: ${{ secrets.ISSUE_RESPONSE_GITHUB_TOKEN }}
          SLACK_ISSUE_RESPONSE_WEBHOOK_URL: ${{ secrets.SLACK_ISSUE_RESPONSE_WEBHOOK_URL }}
Cargo.lock (generated, 1203 changes)
File diff suppressed because it is too large.
Cargo.toml (49 changes)
@@ -212,9 +212,7 @@ members = [
    "extensions/glsl",
    "extensions/html",
    "extensions/proto",
    "extensions/ruff",
    "extensions/slash-commands-example",
    "extensions/snippets",
    "extensions/test-extension",

    #
@@ -275,7 +273,7 @@ cloud_llm_client = { path = "crates/cloud_llm_client" }
cloud_zeta2_prompt = { path = "crates/cloud_zeta2_prompt" }
collab = { path = "crates/collab" }
collab_ui = { path = "crates/collab_ui" }
collections = { path = "crates/collections" }
collections = { path = "crates/collections", package = "zed-collections", version = "0.1.0" }
command_palette = { path = "crates/command_palette" }
command_palette_hooks = { path = "crates/command_palette_hooks" }
component = { path = "crates/component" }
@@ -291,6 +289,7 @@ debug_adapter_extension = { path = "crates/debug_adapter_extension" }
debugger_tools = { path = "crates/debugger_tools" }
debugger_ui = { path = "crates/debugger_ui" }
deepseek = { path = "crates/deepseek" }
derive_refineable = { path = "crates/refineable/derive_refineable", package = "zed-derive-refineable", version = "0.1.0" }
diagnostics = { path = "crates/diagnostics" }
editor = { path = "crates/editor" }
extension = { path = "crates/extension" }
@@ -309,10 +308,10 @@ git_ui = { path = "crates/git_ui" }
go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
gpui = { path = "crates/gpui", default-features = false }
gpui_macros = { path = "crates/gpui_macros" }
gpui_macros = { path = "crates/gpui_macros", package = "gpui-macros", version = "0.1.0" }
gpui_tokio = { path = "crates/gpui_tokio" }
html_to_markdown = { path = "crates/html_to_markdown" }
http_client = { path = "crates/http_client" }
http_client = { path = "crates/http_client", package = "zed-http-client", version = "0.1.0" }
http_client_tls = { path = "crates/http_client_tls" }
icons = { path = "crates/icons" }
image_viewer = { path = "crates/image_viewer" }
@@ -341,7 +340,7 @@ lsp = { path = "crates/lsp" }
markdown = { path = "crates/markdown" }
markdown_preview = { path = "crates/markdown_preview" }
svg_preview = { path = "crates/svg_preview" }
media = { path = "crates/media" }
media = { path = "crates/media", package = "zed-media", version = "0.1.0" }
menu = { path = "crates/menu" }
migrator = { path = "crates/migrator" }
mistral = { path = "crates/mistral" }
@@ -358,7 +357,7 @@ outline = { path = "crates/outline" }
outline_panel = { path = "crates/outline_panel" }
panel = { path = "crates/panel" }
paths = { path = "crates/paths" }
perf = { path = "tooling/perf" }
perf = { path = "tooling/perf", package = "zed-perf", version = "0.1.0" }
picker = { path = "crates/picker" }
plugin = { path = "crates/plugin" }
plugin_macros = { path = "crates/plugin_macros" }
@@ -370,7 +369,7 @@ project_symbols = { path = "crates/project_symbols" }
prompt_store = { path = "crates/prompt_store" }
proto = { path = "crates/proto" }
recent_projects = { path = "crates/recent_projects" }
refineable = { path = "crates/refineable" }
refineable = { path = "crates/refineable", package = "zed-refineable", version = "0.1.0" }
release_channel = { path = "crates/release_channel" }
scheduler = { path = "crates/scheduler" }
remote = { path = "crates/remote" }
@@ -383,7 +382,7 @@ rope = { path = "crates/rope" }
rpc = { path = "crates/rpc" }
rules_library = { path = "crates/rules_library" }
search = { path = "crates/search" }
semantic_version = { path = "crates/semantic_version" }
semantic_version = { path = "crates/semantic_version", package = "zed-semantic-version", version = "0.1.0" }
session = { path = "crates/session" }
settings = { path = "crates/settings" }
settings_macros = { path = "crates/settings_macros" }
@@ -396,7 +395,7 @@ sqlez_macros = { path = "crates/sqlez_macros" }
story = { path = "crates/story" }
storybook = { path = "crates/storybook" }
streaming_diff = { path = "crates/streaming_diff" }
sum_tree = { path = "crates/sum_tree" }
sum_tree = { path = "crates/sum_tree", package = "zed-sum-tree", version = "0.1.0" }
supermaven = { path = "crates/supermaven" }
supermaven_api = { path = "crates/supermaven_api" }
system_specs = { path = "crates/system_specs" }
@@ -419,8 +418,8 @@ ui = { path = "crates/ui" }
ui_input = { path = "crates/ui_input" }
ui_macros = { path = "crates/ui_macros" }
ui_prompt = { path = "crates/ui_prompt" }
util = { path = "crates/util" }
util_macros = { path = "crates/util_macros" }
util = { path = "crates/util", package = "zed-util", version = "0.1.0" }
util_macros = { path = "crates/util_macros", package = "zed-util-macros", version = "0.1.0" }
vercel = { path = "crates/vercel" }
vim = { path = "crates/vim" }
vim_mode_setting = { path = "crates/vim_mode_setting" }
@@ -474,9 +473,9 @@ backtrace = "0.3"
base64 = "0.22"
bincode = "1.2.1"
bitflags = "2.6.0"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "bfa594ea697d4b6326ea29f747525c85ecf933b9" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "bfa594ea697d4b6326ea29f747525c85ecf933b9" }
blade-util = { git = "https://github.com/kvark/blade", rev = "bfa594ea697d4b6326ea29f747525c85ecf933b9" }
blade-graphics = { version = "0.7.0" }
blade-macros = { version = "0.3.0" }
blade-util = { version = "0.3.0" }
blake3 = "1.5.3"
bytes = "1.0"
cargo_metadata = "0.19"
@@ -550,6 +549,7 @@ nanoid = "0.4"
nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
nix = "0.29"
num-format = "0.4.4"
num-traits = "0.2"
objc = "0.2"
objc2-foundation = { version = "0.3", default-features = false, features = [
    "NSArray",
@@ -606,7 +606,8 @@ rand = "0.9"
rayon = "1.8"
ref-cast = "1.0.24"
regex = "1.5"
reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "951c770a32f1998d6e999cef3e59e0013e6c4415", default-features = false, features = [
# WARNING: If you change this, you must also publish a new version of zed-reqwest to crates.io
reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "c15662463bda39148ba154100dd44d3fba5873a4", default-features = false, features = [
    "charset",
    "http2",
    "macos-system-configuration",
@@ -614,17 +615,17 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "951c77
    "rustls-tls-native-roots",
    "socks",
    "stream",
] }
], package = "zed-reqwest", version = "0.12.15-zed" }
rsa = "0.9.6"
runtimelib = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734", default-features = false, features = [
    "async-dispatcher-runtime",
] }
rust-embed = { version = "8.4", features = ["include-exclude"] }
rustc-demangle = "0.1.23"
rustc-hash = "2.1.0"
rustls = { version = "0.23.26" }
rustls-platform-verifier = "0.5.0"
scap = { git = "https://github.com/zed-industries/scap", rev = "808aa5c45b41e8f44729d02e38fd00a2fe2722e7", default-features = false }
# WARNING: If you change this, you must also publish a new version of zed-scap to crates.io
scap = { git = "https://github.com/zed-industries/scap", rev = "4afea48c3b002197176fb19cd0f9b180dd36eaac", default-features = false, package = "zed-scap", version = "0.0.8-zed" }
schemars = { version = "1.0", features = ["indexmap2"] }
semver = "1.0"
serde = { version = "1.0.221", features = ["derive", "rc"] }
@@ -650,7 +651,7 @@ streaming-iterator = "0.1"
strsim = "0.11"
strum = { version = "0.27.0", features = ["derive"] }
subtle = "2.5.0"
syn = { version = "2.0.101", features = ["full", "extra-traits"] }
syn = { version = "2.0.101", features = ["full", "extra-traits", "visit-mut"] }
sys-locale = "0.3.1"
sysinfo = "0.31.0"
take-until = "0.2.0"
@@ -668,6 +669,7 @@ tiny_http = "0.8"
tokio = { version = "1" }
tokio-tungstenite = { version = "0.26", features = ["__rustls-tls"] }
toml = "0.8"
toml_edit = { version = "0.22", default-features = false, features = ["display", "parse", "serde"] }
tower-http = "0.4.4"
tree-sitter = { version = "0.25.10", features = ["wasm"] }
tree-sitter-bash = "0.25.0"
@@ -714,7 +716,6 @@ wasmtime = { version = "29", default-features = false, features = [
wasmtime-wasi = "29"
which = "6.0.0"
windows-core = "0.61"
windows-sys = "0.61"
wit-component = "0.221"
workspace-hack = "0.1.0"
yawc = "0.2.5"
@@ -803,7 +804,7 @@ wasmtime = { opt-level = 3 }
activity_indicator = { codegen-units = 1 }
assets = { codegen-units = 1 }
breadcrumbs = { codegen-units = 1 }
collections = { codegen-units = 1 }
zed-collections = { codegen-units = 1 }
command_palette = { codegen-units = 1 }
command_palette_hooks = { codegen-units = 1 }
extension_cli = { codegen-units = 1 }
@@ -823,11 +824,11 @@ outline = { codegen-units = 1 }
paths = { codegen-units = 1 }
prettier = { codegen-units = 1 }
project_symbols = { codegen-units = 1 }
refineable = { codegen-units = 1 }
zed-refineable = { codegen-units = 1 }
release_channel = { codegen-units = 1 }
reqwest_client = { codegen-units = 1 }
rich_text = { codegen-units = 1 }
semantic_version = { codegen-units = 1 }
zed-semantic-version = { codegen-units = 1 }
session = { codegen-units = 1 }
snippet = { codegen-units = 1 }
snippets_ui = { codegen-units = 1 }
assets/icons/paperclip.svg (new file, 3 lines)
@@ -0,0 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10.1645 4.45825L5.20344 9.52074C4.98225 9.74193 4.85798 10.0419 4.85798 10.3548C4.85798 10.6676 4.98225 10.9676 5.20344 11.1888C5.42464 11.41 5.72464 11.5342 6.03746 11.5342C6.35028 11.5342 6.65028 11.41 6.87148 11.1888L11.8326 6.12629C12.2749 5.68397 12.5234 5.08407 12.5234 4.45854C12.5234 3.83302 12.2749 3.23311 11.8326 2.7908C11.3902 2.34849 10.7903 2.1 10.1648 2.1C9.53928 2.1 8.93938 2.34849 8.49707 2.7908L3.55663 7.83265C3.22373 8.16017 2.95897 8.55037 2.77762 8.98072C2.59628 9.41108 2.50193 9.87308 2.50003 10.3401C2.49813 10.8071 2.58871 11.2698 2.76654 11.7017C2.94438 12.1335 3.20595 12.5258 3.53618 12.856C3.8664 13.1863 4.25873 13.4478 4.69055 13.6257C5.12237 13.8035 5.58513 13.8941 6.05213 13.8922C6.51913 13.8903 6.98114 13.7959 7.41149 13.6146C7.84185 13.4332 8.23204 13.1685 8.55957 12.8356L13.5 7.79373" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
@@ -30,7 +30,8 @@
    "ctrl-+": ["zed::IncreaseBufferFontSize", { "persist": false }],
    "ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }],
    "ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }],
    "ctrl-,": "zed::OpenSettings",
    "ctrl-,": "zed::OpenSettingsEditor",
    "ctrl-alt-,": "zed::OpenSettings",
    "ctrl-q": "zed::Quit",
    "f4": "debugger::Start",
    "shift-f5": "debugger::Stop",
@@ -250,7 +251,7 @@
    "alt-enter": "agent::ContinueWithBurnMode",
    "ctrl-y": "agent::AllowOnce",
    "ctrl-alt-y": "agent::AllowAlways",
    "ctrl-d": "agent::RejectOnce"
    "ctrl-alt-z": "agent::RejectOnce"
  }
},
{
@@ -369,7 +370,15 @@
  "bindings": {
    "new": "rules_library::NewRule",
    "ctrl-n": "rules_library::NewRule",
    "ctrl-shift-s": "rules_library::ToggleDefaultRule"
    "ctrl-shift-s": "rules_library::ToggleDefaultRule",
    "ctrl-w": "workspace::CloseWindow"
  }
},
{
  "context": "SettingsWindow",
  "use_key_equivalents": true,
  "bindings": {
    "ctrl-w": "workspace::CloseWindow"
  }
},
{
@@ -39,7 +39,8 @@
    "cmd-+": ["zed::IncreaseBufferFontSize", { "persist": false }],
    "cmd--": ["zed::DecreaseBufferFontSize", { "persist": false }],
    "cmd-0": ["zed::ResetBufferFontSize", { "persist": false }],
    "cmd-,": "zed::OpenSettings",
    "cmd-,": "zed::OpenSettingsEditor",
    "cmd-alt-,": "zed::OpenSettings",
    "cmd-q": "zed::Quit",
    "cmd-h": "zed::Hide",
    "alt-cmd-h": "zed::HideOthers",
@@ -289,7 +290,7 @@
    "alt-enter": "agent::ContinueWithBurnMode",
    "cmd-y": "agent::AllowOnce",
    "cmd-alt-y": "agent::AllowAlways",
    "cmd-d": "agent::RejectOnce"
    "cmd-alt-z": "agent::RejectOnce"
  }
},
{
@@ -430,6 +431,13 @@
    "cmd-w": "workspace::CloseWindow"
  }
},
{
  "context": "SettingsWindow",
  "use_key_equivalents": true,
  "bindings": {
    "cmd-w": "workspace::CloseWindow"
  }
},
{
  "context": "BufferSearchBar",
  "use_key_equivalents": true,
@@ -29,7 +29,8 @@
    "ctrl-shift-=": ["zed::IncreaseBufferFontSize", { "persist": false }],
    "ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }],
    "ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }],
    "ctrl-,": "zed::OpenSettings",
    "ctrl-,": "zed::OpenSettingsEditor",
    "ctrl-alt-,": "zed::OpenSettings",
    "ctrl-q": "zed::Quit",
    "f4": "debugger::Start",
    "shift-f5": "debugger::Stop",
@@ -251,7 +252,7 @@
    "alt-enter": "agent::ContinueWithBurnMode",
    "ctrl-y": "agent::AllowOnce",
    "ctrl-alt-y": "agent::AllowAlways",
    "ctrl-d": "agent::RejectOnce"
    "ctrl-alt-z": "agent::RejectOnce"
  }
},
{
@@ -378,7 +379,15 @@
  "use_key_equivalents": true,
  "bindings": {
    "ctrl-n": "rules_library::NewRule",
    "ctrl-shift-s": "rules_library::ToggleDefaultRule"
    "ctrl-shift-s": "rules_library::ToggleDefaultRule",
    "ctrl-w": "workspace::CloseWindow"
  }
},
{
  "context": "SettingsWindow",
  "use_key_equivalents": true,
  "bindings": {
    "ctrl-w": "workspace::CloseWindow"
  }
},
{
@@ -240,7 +240,7 @@
    "delete": "vim::DeleteRight",
    "g shift-j": "vim::JoinLinesNoWhitespace",
    "y": "vim::PushYank",
    "shift-y": "vim::YankToEndOfLine",
    "shift-y": "vim::YankLine",
    "x": "vim::DeleteRight",
    "shift-x": "vim::DeleteLeft",
    "ctrl-a": "vim::Increment",
@@ -393,7 +393,7 @@
    "escape": "editor::Cancel",
    "shift-d": "vim::DeleteToEndOfLine",
    "shift-j": "vim::JoinLines",
    "shift-y": "vim::YankToEndOfLine",
    "shift-y": "vim::YankLine",
    "shift-i": "vim::InsertFirstNonWhitespace",
    "shift-a": "vim::InsertEndOfLine",
    "o": "vim::InsertLineBelow",
@@ -884,10 +884,12 @@
    "/": "project_panel::NewSearchInDirectory",
    "d": "project_panel::NewDirectory",
    "enter": "project_panel::OpenPermanent",
    "escape": "project_panel::ToggleFocus",
    "escape": "vim::ToggleProjectPanelFocus",
    "h": "project_panel::CollapseSelectedEntry",
    "j": "menu::SelectNext",
    "k": "menu::SelectPrevious",
    "j": "vim::MenuSelectNext",
    "k": "vim::MenuSelectPrevious",
    "down": "vim::MenuSelectNext",
    "up": "vim::MenuSelectPrevious",
    "l": "project_panel::ExpandSelectedEntry",
    "shift-d": "project_panel::Delete",
    "shift-r": "project_panel::Rename",
@@ -906,7 +908,22 @@
    "{": "project_panel::SelectPrevDirectory",
    "shift-g": "menu::SelectLast",
    "g g": "menu::SelectFirst",
    "-": "project_panel::SelectParent"
    "-": "project_panel::SelectParent",
    "ctrl-u": "project_panel::ScrollUp",
    "ctrl-d": "project_panel::ScrollDown",
    "z t": "project_panel::ScrollCursorTop",
    "z z": "project_panel::ScrollCursorCenter",
    "z b": "project_panel::ScrollCursorBottom",
    "0": ["vim::Number", 0],
    "1": ["vim::Number", 1],
    "2": ["vim::Number", 2],
    "3": ["vim::Number", 3],
    "4": ["vim::Number", 4],
    "5": ["vim::Number", 5],
    "6": ["vim::Number", 6],
    "7": ["vim::Number", 7],
    "8": ["vim::Number", 8],
    "9": ["vim::Number", 9]
  }
},
{
@@ -29,7 +29,9 @@ Generate {{content_type}} based on the following prompt:

Match the indentation in the original file in the inserted {{content_type}}, don't include any indentation on blank lines.

Immediately start with the following format with no remarks:
Return ONLY the {{content_type}} to insert. Do NOT include any XML tags like <document>, <insert_here>, or any surrounding markup from the input.

Respond with a code block containing the {{content_type}} to insert. Replace \{{INSERTED_CODE}} with your actual {{content_type}}:

```
\{{INSERTED_CODE}}
@@ -66,7 +68,9 @@ Only make changes that are necessary to fulfill the prompt, leave everything els

Start at the indentation level in the original file in the rewritten {{content_type}}. Don't stop until you've rewritten the entire section, even if you have no more changes to make, always write out the whole section with no unnecessary elisions.

Immediately start with the following format with no remarks:
Return ONLY the rewritten {{content_type}}. Do NOT include any XML tags like <document>, <rewrite_this>, or any surrounding markup from the input.

Respond with a code block containing the rewritten {{content_type}}. Replace \{{REWRITTEN_CODE}} with your actual rewritten {{content_type}}:

```
\{{REWRITTEN_CODE}}
@@ -1,5 +1,7 @@
|
||||
{
|
||||
"project_name": null,
|
||||
/// The displayed name of this project. If not set or empty, the root directory name
|
||||
/// will be displayed.
|
||||
"project_name": "",
|
||||
// The name of the Zed theme to use for the UI.
|
||||
//
|
||||
// `mode` is one of:
|
||||
@@ -72,8 +74,10 @@
|
||||
"ui_font_weight": 400,
|
||||
// The default font size for text in the UI
|
||||
"ui_font_size": 16,
|
||||
// The default font size for text in the agent panel. Falls back to the UI font size if unset.
|
||||
"agent_font_size": null,
|
||||
// The default font size for agent responses in the agent panel. Falls back to the UI font size if unset.
|
||||
"agent_ui_font_size": null,
|
||||
// The default font size for user messages in the agent panel. Falls back to the buffer font size if unset.
|
||||
"agent_buffer_font_size": 12,
|
||||
// How much to fade out unused code.
|
||||
"unnecessary_code_fade": 0.3,
|
||||
// Active pane styling settings.
|
||||
@@ -1227,6 +1231,10 @@
|
||||
// 2. Hide the gutter
|
||||
// "git_gutter": "hide"
|
||||
"git_gutter": "tracked_files",
|
||||
/// Sets the debounce threshold (in milliseconds) after which changes are reflected in the git gutter.
|
||||
///
|
||||
/// Default: null
|
||||
"gutter_debounce": null,
|
||||
// Control whether the git blame information is shown inline,
|
||||
// in the currently focused line.
|
||||
"inline_blame": {
|
||||
@@ -1242,6 +1250,9 @@
|
||||
// The minimum column number to show the inline blame information at
|
||||
"min_column": 0
|
||||
},
|
||||
"blame": {
|
||||
"show_avatar": true
|
||||
},
|
||||
// Control which information is shown in the branch picker.
|
||||
"branch_picker": {
|
||||
"show_author_name": true
|
||||
@@ -1322,6 +1333,8 @@
|
||||
},
|
||||
// Status bar-related settings.
|
||||
"status_bar": {
|
||||
// Whether to show the status bar.
|
||||
"experimental.show": true,
|
||||
// Whether to show the active language button in the status bar.
|
||||
"active_language_button": true,
|
||||
// Whether to show the cursor position button in the status bar.
|
||||
@@ -1557,6 +1570,14 @@
|
||||
"auto_install_extensions": {
|
||||
"html": true
|
||||
},
|
||||
// The capabilities granted to extensions.
|
||||
//
|
||||
// This list can be customized to restrict what extensions are able to do.
|
||||
"granted_extension_capabilities": [
|
||||
{ "kind": "process:exec", "command": "*", "args": ["**"] },
|
||||
{ "kind": "download_file", "host": "*", "path": ["**"] },
|
||||
{ "kind": "npm:install", "package": "*" }
|
||||
],
|
||||
// Controls how completions are processed for this language.
|
||||
"completions": {
|
||||
// Controls how words are completed.
|
||||
@@ -1855,21 +1876,19 @@
|
||||
// Allows to enable/disable formatting with Prettier
|
||||
// and configure default Prettier, used when no project-level Prettier installation is found.
|
||||
"prettier": {
|
||||
// // Whether to consider prettier formatter or not when attempting to format a file.
|
||||
"allowed": false
|
||||
//
|
||||
// // Use regular Prettier json configuration.
|
||||
// // If Prettier is allowed, Zed will use this for its Prettier instance for any applicable file, if
|
||||
// // the project has no other Prettier installed.
|
||||
// "plugins": [],
|
||||
//
|
||||
// // Use regular Prettier json configuration.
|
||||
// // If Prettier is allowed, Zed will use this for its Prettier instance for any applicable file, if
|
||||
// // the project has no other Prettier installed.
|
||||
// Enables or disables formatting with Prettier for any given language.
|
||||
"allowed": false,
|
||||
// Forces Prettier integration to use a specific parser name when formatting files with the language.
|
||||
"plugins": [],
|
||||
// Default Prettier options, in the format as in package.json section for Prettier.
|
||||
// If project installs Prettier via its package.json, these options will be ignored.
|
||||
// "trailingComma": "es5",
|
||||
// "tabWidth": 4,
|
||||
// "semi": false,
|
||||
// "singleQuote": true
|
||||
// Forces Prettier integration to use a specific parser name when formatting files with the language
|
||||
// when set to a non-empty string.
|
||||
"parser": ""
|
||||
},
|
||||
// Settings for auto-closing of JSX tags.
|
||||
"jsx_tag_auto_close": {
|
||||
@@ -2019,7 +2038,7 @@
|
||||
// Examples:
|
||||
// "profiles": {
|
||||
// "Presenting": {
|
||||
// "agent_font_size": 20.0,
|
||||
// "agent_ui_font_size": 20.0,
|
||||
// "buffer_font_size": 20.0,
|
||||
// "theme": "One Light",
|
||||
// "ui_font_size": 20.0
|
||||
|
||||
@@ -192,7 +192,7 @@
|
||||
"font_weight": null
|
||||
},
|
||||
"comment": {
|
||||
"color": "#abb5be8c",
|
||||
"color": "#5c6773ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
@@ -583,7 +583,7 @@
|
||||
"font_weight": null
|
||||
},
|
||||
"comment": {
|
||||
"color": "#787b8099",
|
||||
"color": "#abb0b6ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
@@ -630,7 +630,7 @@
|
||||
"hint": {
|
||||
"color": "#8ca7c2ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#fa8d3eff",
|
||||
@@ -974,7 +974,7 @@
|
||||
"font_weight": null
|
||||
},
|
||||
"comment": {
|
||||
"color": "#b8cfe680",
|
||||
"color": "#5c6773ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
@@ -1021,7 +1021,7 @@
|
||||
"hint": {
|
||||
"color": "#7399a3ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#ffad65ff",
|
||||
|
||||
@@ -653,7 +653,7 @@
|
||||
"hint": {
|
||||
"color": "#8c957dff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#fb4833ff",
|
||||
@@ -1058,7 +1058,7 @@
|
||||
"hint": {
|
||||
"color": "#8c957dff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#fb4833ff",
|
||||
@@ -1463,7 +1463,7 @@
|
||||
"hint": {
|
||||
"color": "#677562ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#9d0006ff",
|
||||
@@ -1868,7 +1868,7 @@
|
||||
"hint": {
|
||||
"color": "#677562ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#9d0006ff",
|
||||
@@ -2273,7 +2273,7 @@
|
||||
"hint": {
|
||||
"color": "#677562ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#9d0006ff",
|
||||
|
||||
@@ -643,7 +643,7 @@
|
||||
"hint": {
|
||||
"color": "#7274a7ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#a449abff",
|
||||
|
||||
@@ -3,6 +3,7 @@ mod diff;
|
||||
mod mention;
|
||||
mod terminal;
|
||||
|
||||
use ::terminal::terminal_settings::TerminalSettings;
|
||||
use agent_settings::AgentSettings;
|
||||
use collections::HashSet;
|
||||
pub use connection::*;
|
||||
@@ -11,7 +12,7 @@ use language::language_settings::FormatOnSave;
|
||||
pub use mention::*;
|
||||
use project::lsp_store::{FormatTrigger, LspFormatTarget};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings as _;
|
||||
use settings::{Settings as _, SettingsLocation};
|
||||
use task::{Shell, ShellBuilder};
|
||||
pub use terminal::*;
|
||||
|
||||
@@ -34,7 +35,7 @@ use std::rc::Rc;
|
||||
use std::time::{Duration, Instant};
|
||||
use std::{fmt::Display, mem, path::PathBuf, sync::Arc};
|
||||
use ui::App;
|
||||
use util::{ResultExt, get_default_system_shell};
|
||||
use util::{ResultExt, get_default_system_shell_preferring_bash};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -787,6 +788,8 @@ pub struct AcpThread {
|
||||
prompt_capabilities: acp::PromptCapabilities,
|
||||
_observe_prompt_capabilities: Task<anyhow::Result<()>>,
|
||||
terminals: HashMap<acp::TerminalId, Entity<Terminal>>,
|
||||
pending_terminal_output: HashMap<acp::TerminalId, Vec<Vec<u8>>>,
|
||||
pending_terminal_exit: HashMap<acp::TerminalId, acp::TerminalExitStatus>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -809,6 +812,126 @@ pub enum AcpThreadEvent {
|
||||
|
||||
impl EventEmitter<AcpThreadEvent> for AcpThread {}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum TerminalProviderEvent {
|
||||
Created {
|
||||
terminal_id: acp::TerminalId,
|
||||
label: String,
|
||||
cwd: Option<PathBuf>,
|
||||
output_byte_limit: Option<u64>,
|
||||
terminal: Entity<::terminal::Terminal>,
|
||||
},
|
||||
Output {
|
||||
terminal_id: acp::TerminalId,
|
||||
data: Vec<u8>,
|
||||
},
|
||||
TitleChanged {
|
||||
terminal_id: acp::TerminalId,
|
||||
title: String,
|
||||
},
|
||||
Exit {
|
||||
terminal_id: acp::TerminalId,
|
||||
status: acp::TerminalExitStatus,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum TerminalProviderCommand {
|
||||
WriteInput {
|
||||
terminal_id: acp::TerminalId,
|
||||
bytes: Vec<u8>,
|
||||
},
|
||||
Resize {
|
||||
terminal_id: acp::TerminalId,
|
||||
cols: u16,
|
||||
rows: u16,
|
||||
},
|
||||
Close {
|
||||
terminal_id: acp::TerminalId,
|
||||
},
|
||||
}
|
||||
|
||||
impl AcpThread {
|
||||
pub fn on_terminal_provider_event(
|
||||
&mut self,
|
||||
event: TerminalProviderEvent,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
TerminalProviderEvent::Created {
|
||||
terminal_id,
|
||||
label,
|
||||
cwd,
|
||||
output_byte_limit,
|
||||
terminal,
|
||||
} => {
|
||||
let entity = self.register_terminal_created(
|
||||
terminal_id.clone(),
|
||||
label,
|
||||
cwd,
|
||||
output_byte_limit,
|
||||
terminal,
|
||||
cx,
|
||||
);
|
||||
|
||||
if let Some(mut chunks) = self.pending_terminal_output.remove(&terminal_id) {
|
||||
for data in chunks.drain(..) {
|
||||
entity.update(cx, |term, cx| {
|
||||
term.inner().update(cx, |inner, cx| {
|
||||
inner.write_output(&data, cx);
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(_status) = self.pending_terminal_exit.remove(&terminal_id) {
|
||||
entity.update(cx, |_term, cx| {
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
TerminalProviderEvent::Output { terminal_id, data } => {
|
||||
if let Some(entity) = self.terminals.get(&terminal_id) {
|
||||
entity.update(cx, |term, cx| {
|
||||
term.inner().update(cx, |inner, cx| {
|
||||
inner.write_output(&data, cx);
|
||||
})
|
||||
});
|
||||
} else {
|
||||
self.pending_terminal_output
|
||||
.entry(terminal_id)
|
||||
.or_default()
|
||||
.push(data);
|
||||
}
|
||||
}
|
||||
TerminalProviderEvent::TitleChanged { terminal_id, title } => {
|
||||
if let Some(entity) = self.terminals.get(&terminal_id) {
|
||||
entity.update(cx, |term, cx| {
|
||||
term.inner().update(cx, |inner, cx| {
|
||||
inner.breadcrumb_text = title;
|
||||
cx.emit(::terminal::Event::BreadcrumbsChanged);
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
TerminalProviderEvent::Exit {
|
||||
terminal_id,
|
||||
status,
|
||||
} => {
|
||||
if let Some(entity) = self.terminals.get(&terminal_id) {
|
||||
entity.update(cx, |_term, cx| {
|
||||
cx.notify();
|
||||
});
|
||||
} else {
|
||||
self.pending_terminal_exit.insert(terminal_id, status);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
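
The `Created`/`Output` handling above amounts to a buffer-then-flush pattern: output that arrives for a terminal id that has not been registered yet is queued in `pending_terminal_output`, then replayed once the matching `Created` event registers the terminal. A minimal, self-contained sketch of that pattern (illustrative only; the id type and the byte sink are stand-ins, not the gpui entities used above):

```rust
use std::collections::HashMap;

// Stand-in for acp::TerminalId.
type TerminalId = String;

#[derive(Default)]
struct TerminalRouter {
    // Registered terminals: id -> bytes rendered so far.
    terminals: HashMap<TerminalId, Vec<u8>>,
    // Output that arrived before `Created` was seen for that id.
    pending_output: HashMap<TerminalId, Vec<Vec<u8>>>,
}

impl TerminalRouter {
    fn on_output(&mut self, id: TerminalId, data: Vec<u8>) {
        if let Some(buf) = self.terminals.get_mut(&id) {
            buf.extend_from_slice(&data);
        } else {
            // Terminal not created yet: queue the chunk for later replay.
            self.pending_output.entry(id).or_default().push(data);
        }
    }

    fn on_created(&mut self, id: TerminalId) {
        // Register the terminal, then flush queued chunks in arrival order.
        let pending = self.pending_output.remove(&id).unwrap_or_default();
        let buf = self.terminals.entry(id).or_default();
        for chunk in pending {
            buf.extend_from_slice(&chunk);
        }
    }
}

fn main() {
    let mut router = TerminalRouter::default();
    router.on_output("t1".into(), b"hello ".to_vec());
    router.on_created("t1".into());
    router.on_output("t1".into(), b"world".to_vec());
    assert_eq!(router.terminals["t1"], b"hello world");
}
```

The tests added further down in this file exercise exactly this ordering (output and exit arriving before the terminal is created).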
|
||||
|
||||
#[derive(PartialEq, Eq, Debug)]
|
||||
pub enum ThreadStatus {
|
||||
Idle,
|
||||
@@ -886,6 +1009,8 @@ impl AcpThread {
|
||||
prompt_capabilities,
|
||||
_observe_prompt_capabilities: task,
|
||||
terminals: HashMap::default(),
|
||||
pending_terminal_output: HashMap::default(),
|
||||
pending_terminal_exit: HashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1961,16 +2086,24 @@ impl AcpThread {
|
||||
) -> Task<Result<Entity<Terminal>>> {
|
||||
let env = match &cwd {
|
||||
Some(dir) => self.project.update(cx, |project, cx| {
|
||||
project.directory_environment(dir.as_path().into(), cx)
|
||||
let worktree = project.find_worktree(dir.as_path(), cx);
|
||||
let shell = TerminalSettings::get(
|
||||
worktree.as_ref().map(|(worktree, path)| SettingsLocation {
|
||||
worktree_id: worktree.read(cx).id(),
|
||||
path: &path,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
.shell
|
||||
.clone();
|
||||
project.directory_environment(&shell, dir.as_path().into(), cx)
|
||||
}),
|
||||
None => Task::ready(None).shared(),
|
||||
};
|
||||
|
||||
let env = cx.spawn(async move |_, _| {
|
||||
let mut env = env.await.unwrap_or_default();
|
||||
if cfg!(unix) {
|
||||
env.insert("PAGER".into(), "cat".into());
|
||||
}
|
||||
// Disables paging for `git` and hopefully other commands
|
||||
env.insert("PAGER".into(), "".into());
|
||||
for var in extra_env {
|
||||
env.insert(var.name, var.value);
|
||||
}
|
||||
@@ -1985,18 +2118,16 @@ impl AcpThread {
|
||||
let terminal_id = terminal_id.clone();
|
||||
async move |_this, cx| {
|
||||
let env = env.await;
|
||||
let (task_command, task_args) = ShellBuilder::new(
|
||||
project
|
||||
.update(cx, |project, cx| {
|
||||
project
|
||||
.remote_client()
|
||||
.and_then(|r| r.read(cx).default_system_shell())
|
||||
})?
|
||||
.as_deref(),
|
||||
&Shell::Program(get_default_system_shell()),
|
||||
)
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(command.clone()), &args);
|
||||
let shell = project
|
||||
.update(cx, |project, cx| {
|
||||
project
|
||||
.remote_client()
|
||||
.and_then(|r| r.read(cx).default_system_shell())
|
||||
})?
|
||||
.unwrap_or_else(|| get_default_system_shell_preferring_bash());
|
||||
let (task_command, task_args) = ShellBuilder::new(&Shell::Program(shell))
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(command.clone()), &args);
|
||||
let terminal = project
|
||||
.update(cx, |project, cx| {
|
||||
project.create_terminal_task(
|
||||
@@ -2079,6 +2210,32 @@ impl AcpThread {
|
||||
pub fn emit_load_error(&mut self, error: LoadError, cx: &mut Context<Self>) {
|
||||
cx.emit(AcpThreadEvent::LoadError(error));
|
||||
}
|
||||
|
||||
pub fn register_terminal_created(
|
||||
&mut self,
|
||||
terminal_id: acp::TerminalId,
|
||||
command_label: String,
|
||||
working_dir: Option<PathBuf>,
|
||||
output_byte_limit: Option<u64>,
|
||||
terminal: Entity<::terminal::Terminal>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Entity<Terminal> {
|
||||
let language_registry = self.project.read(cx).languages().clone();
|
||||
|
||||
let entity = cx.new(|cx| {
|
||||
Terminal::new(
|
||||
terminal_id.clone(),
|
||||
&command_label,
|
||||
working_dir.clone(),
|
||||
output_byte_limit.map(|l| l as usize),
|
||||
terminal,
|
||||
language_registry,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
self.terminals.insert(terminal_id.clone(), entity.clone());
|
||||
entity
|
||||
}
|
||||
}
|
||||
|
||||
fn markdown_for_raw_output(
|
||||
@@ -2155,6 +2312,145 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_terminal_output_buffered_before_created_renders(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let project = Project::test(fs, [], cx).await;
|
||||
let connection = Rc::new(FakeAgentConnection::new());
|
||||
let thread = cx
|
||||
.update(|cx| connection.new_thread(project, std::path::Path::new(path!("/test")), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let terminal_id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
|
||||
|
||||
// Send Output BEFORE Created - should be buffered by acp_thread
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Output {
|
||||
terminal_id: terminal_id.clone(),
|
||||
data: b"hello buffered".to_vec(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// Create a display-only terminal and then send Created
|
||||
let lower = cx.new(|cx| {
|
||||
let builder = ::terminal::TerminalBuilder::new_display_only(
|
||||
::terminal::terminal_settings::CursorShape::default(),
|
||||
::terminal::terminal_settings::AlternateScroll::On,
|
||||
None,
|
||||
0,
|
||||
)
|
||||
.unwrap();
|
||||
builder.subscribe(cx)
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Created {
|
||||
terminal_id: terminal_id.clone(),
|
||||
label: "Buffered Test".to_string(),
|
||||
cwd: None,
|
||||
output_byte_limit: None,
|
||||
terminal: lower.clone(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// After Created, buffered Output should have been flushed into the renderer
|
||||
let content = thread.read_with(cx, |thread, cx| {
|
||||
let term = thread.terminal(terminal_id.clone()).unwrap();
|
||||
term.read_with(cx, |t, cx| t.inner().read(cx).get_content())
|
||||
});
|
||||
|
||||
assert!(
|
||||
content.contains("hello buffered"),
|
||||
"expected buffered output to render, got: {content}"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_terminal_output_and_exit_buffered_before_created(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let project = Project::test(fs, [], cx).await;
|
||||
let connection = Rc::new(FakeAgentConnection::new());
|
||||
let thread = cx
|
||||
.update(|cx| connection.new_thread(project, std::path::Path::new(path!("/test")), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let terminal_id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
|
||||
|
||||
// Send Output BEFORE Created
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Output {
|
||||
terminal_id: terminal_id.clone(),
|
||||
data: b"pre-exit data".to_vec(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// Send Exit BEFORE Created
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Exit {
|
||||
terminal_id: terminal_id.clone(),
|
||||
status: acp::TerminalExitStatus {
|
||||
exit_code: Some(0),
|
||||
signal: None,
|
||||
meta: None,
|
||||
},
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// Now create a display-only lower-level terminal and send Created
|
||||
let lower = cx.new(|cx| {
|
||||
let builder = ::terminal::TerminalBuilder::new_display_only(
|
||||
::terminal::terminal_settings::CursorShape::default(),
|
||||
::terminal::terminal_settings::AlternateScroll::On,
|
||||
None,
|
||||
0,
|
||||
)
|
||||
.unwrap();
|
||||
builder.subscribe(cx)
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Created {
|
||||
terminal_id: terminal_id.clone(),
|
||||
label: "Buffered Exit Test".to_string(),
|
||||
cwd: None,
|
||||
output_byte_limit: None,
|
||||
terminal: lower.clone(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// Output should be present after Created (flushed from buffer)
|
||||
let content = thread.read_with(cx, |thread, cx| {
|
||||
let term = thread.terminal(terminal_id.clone()).unwrap();
|
||||
term.read_with(cx, |t, cx| t.inner().read(cx).get_content())
|
||||
});
|
||||
|
||||
assert!(
|
||||
content.contains("pre-exit data"),
|
||||
"expected pre-exit data to render, got: {content}"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_push_user_content_block(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
@@ -31,7 +31,7 @@ impl Diff {
        let buffer = new_buffer.clone();
        async move |_, cx| {
            let language = language_registry
                .language_for_file_path(Path::new(&path))
                .load_language_for_file_path(Path::new(&path))
                .await
                .log_err();
@@ -3276,7 +3276,6 @@ mod tests {
|
||||
use settings::{LanguageModelParameters, Settings, SettingsStore};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use theme::ThemeSettings;
|
||||
use util::path;
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -5337,7 +5336,7 @@ fn main() {{
|
||||
thread_store::init(fs.clone(), cx);
|
||||
workspace::init_settings(cx);
|
||||
language_model::init_settings(cx);
|
||||
ThemeSettings::register(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
ToolRegistry::default_global(cx);
|
||||
assistant_tool::init(cx);
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ use std::{
|
||||
cell::{Ref, RefCell},
|
||||
path::{Path, PathBuf},
|
||||
rc::Rc,
|
||||
sync::{Arc, Mutex},
|
||||
sync::{Arc, LazyLock, Mutex},
|
||||
};
|
||||
use util::{ResultExt as _, rel_path::RelPath};
|
||||
|
||||
@@ -74,17 +74,19 @@ impl Column for DataType {
|
||||
}
|
||||
}
|
||||
|
||||
const RULES_FILE_NAMES: [&str; 9] = [
|
||||
".rules",
|
||||
".cursorrules",
|
||||
".windsurfrules",
|
||||
".clinerules",
|
||||
".github/copilot-instructions.md",
|
||||
"CLAUDE.md",
|
||||
"AGENT.md",
|
||||
"AGENTS.md",
|
||||
"GEMINI.md",
|
||||
];
|
||||
static RULES_FILE_NAMES: LazyLock<[&RelPath; 9]> = LazyLock::new(|| {
|
||||
[
|
||||
RelPath::unix(".rules").unwrap(),
|
||||
RelPath::unix(".cursorrules").unwrap(),
|
||||
RelPath::unix(".windsurfrules").unwrap(),
|
||||
RelPath::unix(".clinerules").unwrap(),
|
||||
RelPath::unix(".github/copilot-instructions.md").unwrap(),
|
||||
RelPath::unix("CLAUDE.md").unwrap(),
|
||||
RelPath::unix("AGENT.md").unwrap(),
|
||||
RelPath::unix("AGENTS.md").unwrap(),
|
||||
RelPath::unix("GEMINI.md").unwrap(),
|
||||
]
|
||||
});
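
The hunk above replaces a `const [&str; 9]` with a `static LazyLock<[&RelPath; 9]>`, moving the `RelPath::unix(...).unwrap()` validation out of every call site and into a single initializer that runs once, on first access (it cannot run in a `const` context). A minimal sketch of the same pattern, using a hypothetical validated path type rather than the real `RelPath` API:

```rust
use std::sync::LazyLock;

// Hypothetical validated relative-path wrapper; a stand-in for util::rel_path::RelPath.
struct UnixPath(&'static str);

impl UnixPath {
    fn parse(s: &'static str) -> Result<&'static UnixPath, &'static str> {
        if s.starts_with('/') {
            Err("expected a relative path")
        } else {
            // Leak one small allocation to get a &'static reference,
            // mirroring how the array above stores `&RelPath` values.
            Ok(Box::leak(Box::new(UnixPath(s))))
        }
    }
}

// The initializer (and its unwraps) runs exactly once, lazily,
// instead of at compile time as a `const` item would require.
static RULES_FILE_NAMES: LazyLock<[&UnixPath; 3]> = LazyLock::new(|| {
    [
        UnixPath::parse(".rules").unwrap(),
        UnixPath::parse("CLAUDE.md").unwrap(),
        UnixPath::parse("AGENTS.md").unwrap(),
    ]
});

fn main() {
    // First dereference triggers initialization.
    assert_eq!(RULES_FILE_NAMES.len(), 3);
}
```

The payoff in the hunks that follow is that call sites compare against `*name` directly instead of re-parsing each string with `RelPath::unix(name).unwrap()` on every event.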
|
||||
|
||||
pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
|
||||
ThreadsDatabase::init(fs, cx);
|
||||
@@ -232,11 +234,10 @@ impl ThreadStore {
|
||||
self.enqueue_system_prompt_reload();
|
||||
}
|
||||
project::Event::WorktreeUpdatedEntries(_, items) => {
|
||||
if items.iter().any(|(path, _, _)| {
|
||||
RULES_FILE_NAMES
|
||||
.iter()
|
||||
.any(|name| path.as_ref() == RelPath::unix(name).unwrap())
|
||||
}) {
|
||||
if items
|
||||
.iter()
|
||||
.any(|(path, _, _)| RULES_FILE_NAMES.iter().any(|name| path.as_ref() == *name))
|
||||
{
|
||||
self.enqueue_system_prompt_reload();
|
||||
}
|
||||
}
|
||||
@@ -368,7 +369,7 @@ impl ThreadStore {
|
||||
.into_iter()
|
||||
.filter_map(|name| {
|
||||
worktree
|
||||
.entry_for_path(RelPath::unix(name).unwrap())
|
||||
.entry_for_path(name)
|
||||
.filter(|entry| entry.is_file())
|
||||
.map(|entry| entry.path.clone())
|
||||
})
|
||||
|
||||
@@ -25,21 +25,23 @@ use std::any::Any;
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::sync::{Arc, LazyLock};
|
||||
use util::ResultExt;
|
||||
use util::rel_path::RelPath;
|
||||
|
||||
const RULES_FILE_NAMES: [&str; 9] = [
|
||||
".rules",
|
||||
".cursorrules",
|
||||
".windsurfrules",
|
||||
".clinerules",
|
||||
".github/copilot-instructions.md",
|
||||
"CLAUDE.md",
|
||||
"AGENT.md",
|
||||
"AGENTS.md",
|
||||
"GEMINI.md",
|
||||
];
|
||||
static RULES_FILE_NAMES: LazyLock<[&RelPath; 9]> = LazyLock::new(|| {
|
||||
[
|
||||
RelPath::unix(".rules").unwrap(),
|
||||
RelPath::unix(".cursorrules").unwrap(),
|
||||
RelPath::unix(".windsurfrules").unwrap(),
|
||||
RelPath::unix(".clinerules").unwrap(),
|
||||
RelPath::unix(".github/copilot-instructions.md").unwrap(),
|
||||
RelPath::unix("CLAUDE.md").unwrap(),
|
||||
RelPath::unix("AGENT.md").unwrap(),
|
||||
RelPath::unix("AGENTS.md").unwrap(),
|
||||
RelPath::unix("GEMINI.md").unwrap(),
|
||||
]
|
||||
});
|
||||
|
||||
pub struct RulesLoadingError {
|
||||
pub message: SharedString,
|
||||
@@ -475,7 +477,7 @@ impl NativeAgent {
|
||||
.into_iter()
|
||||
.filter_map(|name| {
|
||||
worktree
|
||||
.entry_for_path(RelPath::unix(name).unwrap())
|
||||
.entry_for_path(name)
|
||||
.filter(|entry| entry.is_file())
|
||||
.map(|entry| entry.path.clone())
|
||||
})
|
||||
@@ -556,11 +558,10 @@ impl NativeAgent {
|
||||
self.project_context_needs_refresh.send(()).ok();
|
||||
}
|
||||
project::Event::WorktreeUpdatedEntries(_, items) => {
|
||||
if items.iter().any(|(path, _, _)| {
|
||||
RULES_FILE_NAMES
|
||||
.iter()
|
||||
.any(|name| path.as_ref() == RelPath::unix(name).unwrap())
|
||||
}) {
|
||||
if items
|
||||
.iter()
|
||||
.any(|(path, _, _)| RULES_FILE_NAMES.iter().any(|name| path.as_ref() == *name))
|
||||
{
|
||||
self.project_context_needs_refresh.send(()).ok();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,6 +47,8 @@ task.workspace = true
tempfile.workspace = true
thiserror.workspace = true
ui.workspace = true
terminal.workspace = true
uuid.workspace = true
util.workspace = true
watch.workspace = true
workspace-hack.workspace = true
@@ -9,6 +9,7 @@ use futures::io::BufReader;
|
||||
use project::Project;
|
||||
use project::agent_server_store::AgentServerCommand;
|
||||
use serde::Deserialize;
|
||||
use task::Shell;
|
||||
use util::ResultExt as _;
|
||||
|
||||
use std::path::PathBuf;
|
||||
@@ -19,7 +20,9 @@ use thiserror::Error;
|
||||
use anyhow::{Context as _, Result};
|
||||
use gpui::{App, AppContext as _, AsyncApp, Entity, SharedString, Task, WeakEntity};
|
||||
|
||||
use acp_thread::{AcpThread, AuthRequired, LoadError};
|
||||
use acp_thread::{AcpThread, AuthRequired, LoadError, TerminalProviderEvent};
|
||||
use terminal::TerminalBuilder;
|
||||
use terminal::terminal_settings::{AlternateScroll, CursorShape};
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
#[error("Unsupported version")]
|
||||
@@ -79,7 +82,7 @@ impl AcpConnection {
|
||||
is_remote: bool,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<Self> {
|
||||
let mut child = util::command::new_smol_command(command.path);
|
||||
let mut child = util::command::new_smol_command(&command.path);
|
||||
child
|
||||
.args(command.args.iter().map(|arg| arg.as_str()))
|
||||
.envs(command.env.iter().flatten())
|
||||
@@ -94,6 +97,11 @@ impl AcpConnection {
|
||||
let stdout = child.stdout.take().context("Failed to take stdout")?;
|
||||
let stdin = child.stdin.take().context("Failed to take stdin")?;
let stderr = child.stderr.take().context("Failed to take stderr")?;
log::info!(
"Spawning external agent server: {:?}, {:?}",
command.path,
command.args
);
log::trace!("Spawned (pid: {})", child.id());

let sessions = Rc::new(RefCell::new(HashMap::default()));
@@ -380,6 +388,10 @@ impl AgentConnection for AcpConnection {
match result {
Ok(response) => Ok(response),
Err(err) => {
if err.code == acp::ErrorCode::AUTH_REQUIRED.code {
return Err(anyhow!(acp::Error::auth_required()));
}

if err.code != ErrorCode::INTERNAL_ERROR.code {
anyhow::bail!(err)
}
@@ -696,10 +708,100 @@ impl acp::Client for ClientDelegate {
}
}

// Clone so we can inspect meta both before and after handing off to the thread
let update_clone = notification.update.clone();

// Pre-handle: if a ToolCall carries terminal_info, create/register a display-only terminal.
if let acp::SessionUpdate::ToolCall(tc) = &update_clone {
if let Some(meta) = &tc.meta {
if let Some(terminal_info) = meta.get("terminal_info") {
if let Some(id_str) = terminal_info.get("terminal_id").and_then(|v| v.as_str())
{
let terminal_id = acp::TerminalId(id_str.into());
let cwd = terminal_info
.get("cwd")
.and_then(|v| v.as_str().map(PathBuf::from));

// Create a minimal display-only lower-level terminal and register it.
let _ = session.thread.update(&mut self.cx.clone(), |thread, cx| {
let builder = TerminalBuilder::new_display_only(
CursorShape::default(),
AlternateScroll::On,
None,
0,
)?;
let lower = cx.new(|cx| builder.subscribe(cx));
thread.on_terminal_provider_event(
TerminalProviderEvent::Created {
terminal_id: terminal_id.clone(),
label: tc.title.clone(),
cwd,
output_byte_limit: None,
terminal: lower,
},
cx,
);
anyhow::Ok(())
});
}
}
}
}

// Forward the update to the acp_thread as usual.
session.thread.update(&mut self.cx.clone(), |thread, cx| {
thread.handle_session_update(notification.update, cx)
thread.handle_session_update(notification.update.clone(), cx)
})??;

// Post-handle: stream terminal output/exit if present on ToolCallUpdate meta.
if let acp::SessionUpdate::ToolCallUpdate(tcu) = &update_clone {
if let Some(meta) = &tcu.meta {
if let Some(term_out) = meta.get("terminal_output") {
if let Some(id_str) = term_out.get("terminal_id").and_then(|v| v.as_str()) {
let terminal_id = acp::TerminalId(id_str.into());
if let Some(s) = term_out.get("data").and_then(|v| v.as_str()) {
let data = s.as_bytes().to_vec();
let _ = session.thread.update(&mut self.cx.clone(), |thread, cx| {
thread.on_terminal_provider_event(
TerminalProviderEvent::Output {
terminal_id: terminal_id.clone(),
data,
},
cx,
);
});
}
}
}

// terminal_exit
if let Some(term_exit) = meta.get("terminal_exit") {
if let Some(id_str) = term_exit.get("terminal_id").and_then(|v| v.as_str()) {
let terminal_id = acp::TerminalId(id_str.into());
let status = acp::TerminalExitStatus {
exit_code: term_exit
.get("exit_code")
.and_then(|v| v.as_u64())
.map(|i| i as u32),
signal: term_exit
.get("signal")
.and_then(|v| v.as_str().map(|s| s.to_string())),
meta: None,
};
let _ = session.thread.update(&mut self.cx.clone(), |thread, cx| {
thread.on_terminal_provider_event(
TerminalProviderEvent::Exit {
terminal_id: terminal_id.clone(),
status,
},
cx,
);
});
}
}
}
}

Ok(())
}
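
Note: the handler above only inspects a few well-known keys on the update's meta object. A minimal sketch of the assumed payload shapes follows; the key names (terminal_info, terminal_output, terminal_exit, terminal_id, cwd, data, exit_code, signal) are the ones read by the code, while the concrete values and the exact nesting are illustrative assumptions rather than a documented schema.

// Illustrative only: meta payloads assumed by the pre/post handlers above.
use serde_json::json;

fn example_meta_payloads() -> (serde_json::Value, serde_json::Value, serde_json::Value) {
    // Sent once on ToolCall: registers a display-only terminal for the tool call.
    let terminal_info = json!({
        "terminal_info": { "terminal_id": "term-1", "cwd": "/path/to/project" }
    });
    // Sent on ToolCallUpdate while the command runs: streamed output chunks.
    let terminal_output = json!({
        "terminal_output": { "terminal_id": "term-1", "data": "compiling...\n" }
    });
    // Sent on ToolCallUpdate when the command finishes.
    let terminal_exit = json!({
        "terminal_exit": { "terminal_id": "term-1", "exit_code": 0, "signal": null }
    });
    (terminal_info, terminal_output, terminal_exit)
}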

@@ -707,25 +809,68 @@ impl acp::Client for ClientDelegate {
&self,
args: acp::CreateTerminalRequest,
) -> Result<acp::CreateTerminalResponse, acp::Error> {
let terminal = self
.session_thread(&args.session_id)?
.update(&mut self.cx.clone(), |thread, cx| {
thread.create_terminal(
args.command,
args.args,
args.env,
args.cwd,
args.output_byte_limit,
let thread = self.session_thread(&args.session_id)?;
let project = thread.read_with(&self.cx, |thread, _cx| thread.project().clone())?;

let mut env = if let Some(dir) = &args.cwd {
project
.update(&mut self.cx.clone(), |project, cx| {
project.directory_environment(&task::Shell::System, dir.clone().into(), cx)
})?
.await
.unwrap_or_default()
} else {
Default::default()
};
for var in args.env {
env.insert(var.name, var.value);
}

// Use remote shell or default system shell, as appropriate
let shell = project
.update(&mut self.cx.clone(), |project, cx| {
project
.remote_client()
.and_then(|r| r.read(cx).default_system_shell())
.map(Shell::Program)
})?
.unwrap_or(task::Shell::System);
let (task_command, task_args) = task::ShellBuilder::new(&shell)
.redirect_stdin_to_dev_null()
.build(Some(args.command.clone()), &args.args);

let terminal_entity = project
.update(&mut self.cx.clone(), |project, cx| {
project.create_terminal_task(
task::SpawnInTerminal {
command: Some(task_command),
args: task_args,
cwd: args.cwd.clone(),
env,
..Default::default()
},
cx,
)
})?
.await?;
Ok(
terminal.read_with(&self.cx, |terminal, _| acp::CreateTerminalResponse {
terminal_id: terminal.id().clone(),
meta: None,
})?,
)

// Register with renderer
let terminal_entity = thread.update(&mut self.cx.clone(), |thread, cx| {
thread.register_terminal_created(
acp::TerminalId(uuid::Uuid::new_v4().to_string().into()),
format!("{} {}", args.command, args.args.join(" ")),
args.cwd.clone(),
args.output_byte_limit,
terminal_entity,
cx,
)
})?;
let terminal_id =
terminal_entity.read_with(&self.cx, |terminal, _| terminal.id().clone())?;
Ok(acp::CreateTerminalResponse {
terminal_id,
meta: None,
})
}
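
For context, a condensed sketch of the command-wrapping step used by create_terminal_command above, detached from the GPUI entities. The task::Shell, task::ShellBuilder, and task::SpawnInTerminal items are the ones used in the diff; the literal program and arguments are placeholders, and the builder's exact signature is assumed from the call site above.

// Illustrative only: wrapping the agent's requested command in the user's shell
// before spawning it as a terminal task.
fn wrap_command(env: collections::HashMap<String, String>) -> task::SpawnInTerminal {
    let shell = task::Shell::System; // or the remote default shell, when available
    let (program, args) = task::ShellBuilder::new(&shell)
        .redirect_stdin_to_dev_null()
        .build(Some("cargo".to_string()), &vec!["build".to_string()]);

    task::SpawnInTerminal {
        command: Some(program),
        args,
        cwd: None,               // the request's cwd, when provided
        env,                     // directory environment merged with the request env
        ..Default::default()
    }
}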

async fn kill_terminal_command(

@@ -1,5 +1,6 @@
mod acp;
mod claude;
mod codex;
mod custom;
mod gemini;

@@ -8,6 +9,7 @@ pub mod e2e_tests;

pub use claude::*;
use client::ProxySettings;
pub use codex::*;
use collections::HashMap;
pub use custom::*;
use fs::Fs;

crates/agent_servers/src/codex.rs (new file, 80 lines)
@@ -0,0 +1,80 @@
use std::rc::Rc;
use std::{any::Any, path::Path};

use crate::{AgentServer, AgentServerDelegate, load_proxy_env};
use acp_thread::AgentConnection;
use anyhow::{Context as _, Result};
use gpui::{App, SharedString, Task};
use project::agent_server_store::CODEX_NAME;

#[derive(Clone)]
pub struct Codex;

#[cfg(test)]
pub(crate) mod tests {
use super::*;

crate::common_e2e_tests!(async |_, _, _| Codex, allow_option_id = "proceed_once");
}

impl AgentServer for Codex {
fn telemetry_id(&self) -> &'static str {
"codex"
}

fn name(&self) -> SharedString {
"Codex".into()
}

fn logo(&self) -> ui::IconName {
ui::IconName::AiOpenAi
}

fn connect(
&self,
root_dir: Option<&Path>,
delegate: AgentServerDelegate,
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
let name = self.name();
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
let is_remote = delegate.project.read(cx).is_via_remote_server();
let store = delegate.store.downgrade();
let extra_env = load_proxy_env(cx);
let default_mode = self.default_mode(cx);

cx.spawn(async move |cx| {
let (command, root_dir, login) = store
.update(cx, |store, cx| {
let agent = store
.get_external_agent(&CODEX_NAME.into())
.context("Codex is not registered")?;
anyhow::Ok(agent.get_command(
root_dir.as_deref(),
extra_env,
delegate.status_tx,
// For now, report that there are no updates.
// (A future PR will use the GitHub Releases API to fetch them.)
delegate.new_version_available,
&mut cx.to_async(),
))
})??
.await?;

let connection = crate::acp::connect(
name,
command,
root_dir.as_ref(),
default_mode,
is_remote,
cx,
)
.await?;
Ok((connection, login))
})
}

fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
self
}
}
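
Since codex.rs only has to satisfy the existing AgentServer trait, wiring it up elsewhere stays uniform with Gemini and Claude Code. A small sketch of that uniform handling (the trait methods are the ones implemented above; the surrounding function is illustrative):

// Illustrative only: Codex is handled through the same trait object as the
// other external agent servers.
fn describe(server: std::rc::Rc<dyn AgentServer>) -> (&'static str, gpui::SharedString) {
    (server.telemetry_id(), server.name())
}
// e.g. describe(std::rc::Rc::new(Codex)) yields ("codex", "Codex");
// `connect` then returns the AgentConnection plus an optional login task.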
@@ -483,6 +483,13 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
default_mode: None,
}),
gemini: Some(crate::gemini::tests::local_command().into()),
codex: Some(BuiltinAgentServerSettings {
path: Some("codex-acp".into()),
args: None,
env: None,
ignore_system_version: None,
default_mode: None,
}),
custom: collections::HashMap::default(),
},
cx,

@@ -151,7 +151,7 @@ impl Default for AgentProfileId {
}

impl Settings for AgentSettings {
fn from_settings(content: &settings::SettingsContent, _cx: &mut App) -> Self {
fn from_settings(content: &settings::SettingsContent) -> Self {
let agent = content.agent.clone().unwrap();
Self {
enabled: agent.enabled.unwrap(),

@@ -80,7 +80,6 @@ serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
settings.workspace = true
shlex.workspace = true
smol.workspace = true
streaming_diff.workspace = true
task.workspace = true

@@ -203,7 +203,7 @@ impl EntryViewState {
self.entries.drain(range);
}

pub fn agent_font_size_changed(&mut self, cx: &mut App) {
pub fn agent_ui_font_size_changed(&mut self, cx: &mut App) {
for entry in self.entries.iter() {
match entry {
Entry::UserMessage { .. } | Entry::AssistantMessage { .. } => {}
@@ -387,7 +387,7 @@ fn diff_editor_text_style_refinement(cx: &mut App) -> TextStyleRefinement {
font_size: Some(
TextSize::Small
.rems(cx)
.to_pixels(ThemeSettings::get_global(cx).agent_font_size(cx))
.to_pixels(ThemeSettings::get_global(cx).agent_ui_font_size(cx))
.into(),
),
..Default::default()
@@ -414,7 +414,6 @@ mod tests {
use project::Project;
use serde_json::json;
use settings::{Settings as _, SettingsStore};
use theme::ThemeSettings;
use util::path;
use workspace::Workspace;

@@ -544,7 +543,7 @@ mod tests {
Project::init_settings(cx);
AgentSettings::register(cx);
workspace::init_settings(cx);
ThemeSettings::register(cx);
theme::init(theme::LoadThemes::JustBase, cx);
release_channel::init(SemanticVersion::default(), cx);
EditorSettings::register(cx);
});
|
||||
|
||||
@@ -290,18 +290,18 @@ impl MessageEditor {
|
||||
let snapshot = self
|
||||
.editor
|
||||
.update(cx, |editor, cx| editor.snapshot(window, cx));
|
||||
let Some((excerpt_id, _, _)) = snapshot.buffer_snapshot.as_singleton() else {
|
||||
let Some((excerpt_id, _, _)) = snapshot.buffer_snapshot().as_singleton() else {
|
||||
return Task::ready(());
|
||||
};
|
||||
let Some(start_anchor) = snapshot
|
||||
.buffer_snapshot
|
||||
.buffer_snapshot()
|
||||
.anchor_in_excerpt(*excerpt_id, start)
|
||||
else {
|
||||
return Task::ready(());
|
||||
};
|
||||
let end_anchor = snapshot
|
||||
.buffer_snapshot
|
||||
.anchor_before(start_anchor.to_offset(&snapshot.buffer_snapshot) + content_len + 1);
|
||||
.buffer_snapshot()
|
||||
.anchor_before(start_anchor.to_offset(&snapshot.buffer_snapshot()) + content_len + 1);
|
||||
|
||||
let crease = if let MentionUri::File { abs_path } = &mention_uri
|
||||
&& let Some(extension) = abs_path.extension()
|
||||
@@ -718,7 +718,7 @@ impl MessageEditor {
|
||||
continue;
|
||||
};
|
||||
|
||||
let crease_range = crease.range().to_offset(&snapshot.buffer_snapshot);
|
||||
let crease_range = crease.range().to_offset(&snapshot.buffer_snapshot());
|
||||
if crease_range.start > ix {
|
||||
//todo(): Custom slash command ContentBlock?
|
||||
// let chunk = if prevent_slash_commands
|
||||
@@ -865,11 +865,11 @@ impl MessageEditor {
|
||||
self.editor.update(cx, |message_editor, cx| {
|
||||
let snapshot = message_editor.snapshot(window, cx);
|
||||
let (excerpt_id, _, buffer_snapshot) =
|
||||
snapshot.buffer_snapshot.as_singleton().unwrap();
|
||||
snapshot.buffer_snapshot().as_singleton().unwrap();
|
||||
|
||||
let text_anchor = buffer_snapshot.anchor_before(buffer_snapshot.len());
|
||||
let multibuffer_anchor = snapshot
|
||||
.buffer_snapshot
|
||||
.buffer_snapshot()
|
||||
.anchor_in_excerpt(*excerpt_id, text_anchor);
|
||||
message_editor.edit(
|
||||
[(
|
||||
@@ -1299,7 +1299,7 @@ impl Render for MessageEditor {
|
||||
font_family: settings.buffer_font.family.clone(),
|
||||
font_fallbacks: settings.buffer_font.fallbacks.clone(),
|
||||
font_features: settings.buffer_font.features.clone(),
|
||||
font_size: settings.buffer_font_size(cx).into(),
|
||||
font_size: settings.agent_buffer_font_size(cx).into(),
|
||||
line_height: relative(settings.buffer_line_height.value()),
|
||||
..Default::default()
|
||||
};
|
||||
@@ -1550,7 +1550,7 @@ impl MentionSet {
|
||||
|
||||
fn remove_invalid(&mut self, snapshot: EditorSnapshot) {
|
||||
for (crease_id, crease) in snapshot.crease_snapshot.creases() {
|
||||
if !crease.range().start.is_valid(&snapshot.buffer_snapshot) {
|
||||
if !crease.range().start.is_valid(&snapshot.buffer_snapshot()) {
|
||||
self.mentions.remove(&crease_id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ use agent_client_protocol::{self as acp, PromptCapabilities};
|
||||
use agent_servers::{AgentServer, AgentServerDelegate};
|
||||
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode};
|
||||
use agent2::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore, NativeAgentServer};
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use anyhow::{Result, anyhow, bail};
|
||||
use arrayvec::ArrayVec;
|
||||
use audio::{Audio, Sound};
|
||||
use buffer_diff::BufferDiff;
|
||||
@@ -26,7 +26,7 @@ use gpui::{
|
||||
CursorStyle, EdgesRefinement, ElementId, Empty, Entity, FocusHandle, Focusable, Hsla, Length,
|
||||
ListOffset, ListState, PlatformDisplay, SharedString, StyleRefinement, Subscription, Task,
|
||||
TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, Window, WindowHandle, div,
|
||||
ease_in_out, linear_color_stop, linear_gradient, list, point, prelude::*, pulsating_between,
|
||||
ease_in_out, linear_color_stop, linear_gradient, list, point, pulsating_between,
|
||||
};
|
||||
use language::Buffer;
|
||||
|
||||
@@ -289,8 +289,9 @@ pub struct AcpThreadView {
|
||||
available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
|
||||
is_loading_contents: bool,
|
||||
new_server_version_available: Option<SharedString>,
|
||||
resume_thread_metadata: Option<DbThreadMetadata>,
|
||||
_cancel_task: Option<Task<()>>,
|
||||
_subscriptions: [Subscription; 4],
|
||||
_subscriptions: [Subscription; 5],
|
||||
}
|
||||
|
||||
enum ThreadState {
|
||||
@@ -380,11 +381,17 @@ impl AcpThreadView {
|
||||
)
|
||||
});
|
||||
|
||||
let agent_server_store = project.read(cx).agent_server_store().clone();
|
||||
let subscriptions = [
|
||||
cx.observe_global_in::<SettingsStore>(window, Self::agent_font_size_changed),
|
||||
cx.observe_global_in::<AgentFontSize>(window, Self::agent_font_size_changed),
|
||||
cx.observe_global_in::<SettingsStore>(window, Self::agent_ui_font_size_changed),
|
||||
cx.observe_global_in::<AgentFontSize>(window, Self::agent_ui_font_size_changed),
|
||||
cx.subscribe_in(&message_editor, window, Self::handle_message_editor_event),
|
||||
cx.subscribe_in(&entry_view_state, window, Self::handle_entry_view_event),
|
||||
cx.subscribe_in(
|
||||
&agent_server_store,
|
||||
window,
|
||||
Self::handle_agent_servers_updated,
|
||||
),
|
||||
];
|
||||
|
||||
Self {
|
||||
@@ -392,7 +399,14 @@ impl AcpThreadView {
|
||||
workspace: workspace.clone(),
|
||||
project: project.clone(),
|
||||
entry_view_state,
|
||||
thread_state: Self::initial_state(agent, resume_thread, workspace, project, window, cx),
|
||||
thread_state: Self::initial_state(
|
||||
agent.clone(),
|
||||
resume_thread.clone(),
|
||||
workspace.clone(),
|
||||
project.clone(),
|
||||
window,
|
||||
cx,
|
||||
),
|
||||
login: None,
|
||||
message_editor,
|
||||
model_selector: None,
|
||||
@@ -421,13 +435,14 @@ impl AcpThreadView {
|
||||
_cancel_task: None,
|
||||
focus_handle: cx.focus_handle(),
|
||||
new_server_version_available: None,
|
||||
resume_thread_metadata: resume_thread,
|
||||
}
|
||||
}
|
||||
|
||||
fn reset(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.thread_state = Self::initial_state(
|
||||
self.agent.clone(),
|
||||
None,
|
||||
self.resume_thread_metadata.clone(),
|
||||
self.workspace.clone(),
|
||||
self.project.clone(),
|
||||
window,
|
||||
@@ -775,6 +790,25 @@ impl AcpThreadView {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn handle_agent_servers_updated(
|
||||
&mut self,
|
||||
_agent_server_store: &Entity<project::AgentServerStore>,
|
||||
_event: &project::AgentServersUpdated,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
// If we're in a LoadError state OR have a thread_error set (which can happen
|
||||
// when agent.connect() fails during loading), retry loading the thread.
|
||||
// This handles the case where a thread is restored before authentication completes.
|
||||
let should_retry =
|
||||
matches!(&self.thread_state, ThreadState::LoadError(_)) || self.thread_error.is_some();
|
||||
|
||||
if should_retry {
|
||||
self.thread_error = None;
|
||||
self.reset(window, cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn workspace(&self) -> &WeakEntity<Workspace> {
|
||||
&self.workspace
|
||||
}
|
||||
@@ -1012,11 +1046,13 @@ impl AcpThreadView {
|
||||
};
|
||||
|
||||
let connection = thread.read(cx).connection().clone();
|
||||
if !connection
|
||||
.auth_methods()
|
||||
.iter()
|
||||
.any(|method| method.id.0.as_ref() == "claude-login")
|
||||
{
|
||||
let auth_methods = connection.auth_methods();
|
||||
let has_supported_auth = auth_methods.iter().any(|method| {
|
||||
let id = method.id.0.as_ref();
|
||||
id == "claude-login" || id == "spawn-gemini-cli"
|
||||
});
|
||||
let can_login = has_supported_auth || auth_methods.is_empty() || self.login.is_some();
|
||||
if !can_login {
|
||||
return;
|
||||
};
|
||||
let this = cx.weak_entity();
|
||||
@@ -1579,31 +1615,20 @@ impl AcpThreadView {
|
||||
return Task::ready(Ok(()));
|
||||
};
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let cwd = project.read(cx).first_project_directory(cx);
|
||||
let shell = project.read(cx).terminal_settings(&cwd, cx).shell.clone();
|
||||
|
||||
window.spawn(cx, async move |cx| {
|
||||
let mut task = login.clone();
|
||||
task.command = task
|
||||
.command
|
||||
.map(|command| anyhow::Ok(shlex::try_quote(&command)?.to_string()))
|
||||
.transpose()?;
|
||||
task.args = task
|
||||
.args
|
||||
.iter()
|
||||
.map(|arg| {
|
||||
Ok(shlex::try_quote(arg)
|
||||
.context("Failed to quote argument")?
|
||||
.to_string())
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
task.shell = task::Shell::WithArguments {
|
||||
program: task.command.take().expect("login command should be set"),
|
||||
args: std::mem::take(&mut task.args),
|
||||
title_override: None
|
||||
};
|
||||
task.full_label = task.label.clone();
|
||||
task.id = task::TaskId(format!("external-agent-{}-login", task.label));
|
||||
task.command_label = task.label.clone();
|
||||
task.use_new_terminal = true;
|
||||
task.allow_concurrent_runs = true;
|
||||
task.hide = task::HideStrategy::Always;
|
||||
task.shell = shell;
|
||||
|
||||
let terminal = terminal_panel.update_in(cx, |terminal_panel, window, cx| {
|
||||
terminal_panel.spawn_task(&task, window, cx)
|
||||
@@ -2725,7 +2750,7 @@ impl AcpThreadView {
|
||||
|
||||
let working_dir = working_dir
|
||||
.as_ref()
|
||||
.map(|path| format!("{}", path.display()))
|
||||
.map(|path| path.display().to_string())
|
||||
.unwrap_or_else(|| "current directory".to_string());
|
||||
|
||||
let is_expanded = self.expanded_tool_calls.contains(&tool_call.id);
|
||||
@@ -3363,6 +3388,12 @@ impl AcpThreadView {
|
||||
.into_any_element()
|
||||
}
|
||||
|
||||
fn activity_bar_bg(&self, cx: &Context<Self>) -> Hsla {
|
||||
let editor_bg_color = cx.theme().colors().editor_background;
|
||||
let active_color = cx.theme().colors().element_selected;
|
||||
editor_bg_color.blend(active_color.opacity(0.3))
|
||||
}
|
||||
|
||||
fn render_activity_bar(
|
||||
&self,
|
||||
thread_entity: &Entity<AcpThread>,
|
||||
@@ -3378,10 +3409,6 @@ impl AcpThreadView {
|
||||
return None;
|
||||
}
|
||||
|
||||
let editor_bg_color = cx.theme().colors().editor_background;
|
||||
let active_color = cx.theme().colors().element_selected;
|
||||
let bg_edit_files_disclosure = editor_bg_color.blend(active_color.opacity(0.3));
|
||||
|
||||
// Temporarily always enable ACP edit controls. This is temporary, to lessen the
|
||||
// impact of a nasty bug that causes them to sometimes be disabled when they shouldn't
|
||||
// be, which blocks you from being able to accept or reject edits. This switches the
|
||||
@@ -3392,7 +3419,7 @@ impl AcpThreadView {
|
||||
v_flex()
|
||||
.mt_1()
|
||||
.mx_2()
|
||||
.bg(bg_edit_files_disclosure)
|
||||
.bg(self.activity_bar_bg(cx))
|
||||
.border_1()
|
||||
.border_b_0()
|
||||
.border_color(cx.theme().colors().border)
|
||||
@@ -3433,27 +3460,33 @@ impl AcpThreadView {
|
||||
.into()
|
||||
}
|
||||
|
||||
fn render_plan_summary(&self, plan: &Plan, window: &mut Window, cx: &Context<Self>) -> Div {
|
||||
fn render_plan_summary(
|
||||
&self,
|
||||
plan: &Plan,
|
||||
window: &mut Window,
|
||||
cx: &Context<Self>,
|
||||
) -> impl IntoElement {
|
||||
let stats = plan.stats();
|
||||
|
||||
let title = if let Some(entry) = stats.in_progress_entry
|
||||
&& !self.plan_expanded
|
||||
{
|
||||
h_flex()
|
||||
.w_full()
|
||||
.cursor_default()
|
||||
.relative()
|
||||
.w_full()
|
||||
.gap_1()
|
||||
.text_xs()
|
||||
.text_color(cx.theme().colors().text_muted)
|
||||
.justify_between()
|
||||
.truncate()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Label::new("Current:")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
Label::new("Current:")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.text_xs()
|
||||
.text_color(cx.theme().colors().text_muted)
|
||||
.line_clamp(1)
|
||||
.child(MarkdownElement::new(
|
||||
entry.content.clone(),
|
||||
plan_label_markdown_style(&entry.status, window, cx),
|
||||
@@ -3461,10 +3494,23 @@ impl AcpThreadView {
|
||||
)
|
||||
.when(stats.pending > 0, |this| {
|
||||
this.child(
|
||||
Label::new(format!("{} left", stats.pending))
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.mr_1(),
|
||||
h_flex()
|
||||
.absolute()
|
||||
.top_0()
|
||||
.right_0()
|
||||
.h_full()
|
||||
.child(div().min_w_8().h_full().bg(linear_gradient(
|
||||
90.,
|
||||
linear_color_stop(self.activity_bar_bg(cx), 1.),
|
||||
linear_color_stop(self.activity_bar_bg(cx).opacity(0.2), 0.),
|
||||
)))
|
||||
.child(
|
||||
div().pr_0p5().bg(self.activity_bar_bg(cx)).child(
|
||||
Label::new(format!("{} left", stats.pending))
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
),
|
||||
)
|
||||
})
|
||||
} else {
|
||||
@@ -3494,23 +3540,19 @@ impl AcpThreadView {
|
||||
};
|
||||
|
||||
h_flex()
|
||||
.id("plan_summary")
|
||||
.p_1()
|
||||
.justify_between()
|
||||
.w_full()
|
||||
.gap_1()
|
||||
.when(self.plan_expanded, |this| {
|
||||
this.border_b_1().border_color(cx.theme().colors().border)
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.id("plan_summary")
|
||||
.w_full()
|
||||
.gap_1()
|
||||
.child(Disclosure::new("plan_disclosure", self.plan_expanded))
|
||||
.child(title)
|
||||
.on_click(cx.listener(|this, _, _, cx| {
|
||||
this.plan_expanded = !this.plan_expanded;
|
||||
cx.notify();
|
||||
})),
|
||||
)
|
||||
.child(Disclosure::new("plan_disclosure", self.plan_expanded))
|
||||
.child(title)
|
||||
.on_click(cx.listener(|this, _, _, cx| {
|
||||
this.plan_expanded = !this.plan_expanded;
|
||||
cx.notify();
|
||||
}))
|
||||
}
|
||||
|
||||
fn render_plan_entries(&self, plan: &Plan, window: &mut Window, cx: &Context<Self>) -> Div {
|
||||
@@ -3712,13 +3754,10 @@ impl AcpThreadView {
|
||||
None
|
||||
} else {
|
||||
Some(
|
||||
Label::new(format!(
|
||||
"{separator}{}{separator}",
|
||||
parent.display(path_style)
|
||||
))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::XSmall)
|
||||
.buffer_font(cx),
|
||||
Label::new(format!("{}{separator}", parent.display(path_style)))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::XSmall)
|
||||
.buffer_font(cx),
|
||||
)
|
||||
}
|
||||
});
|
||||
@@ -3762,7 +3801,7 @@ impl AcpThreadView {
|
||||
.id(("file-name", index))
|
||||
.pr_8()
|
||||
.gap_1p5()
|
||||
.max_w_full()
|
||||
.w_full()
|
||||
.overflow_x_scroll()
|
||||
.child(file_icon)
|
||||
.child(h_flex().gap_0p5().children(file_name).children(file_path))
|
||||
@@ -4914,9 +4953,9 @@ impl AcpThreadView {
|
||||
)
|
||||
}
|
||||
|
||||
fn agent_font_size_changed(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
fn agent_ui_font_size_changed(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.entry_view_state.update(cx, |entry_view_state, cx| {
|
||||
entry_view_state.agent_font_size_changed(cx);
|
||||
entry_view_state.agent_ui_font_size_changed(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -5546,23 +5585,23 @@ fn default_markdown_style(
|
||||
}),
|
||||
code_block: StyleRefinement {
|
||||
padding: EdgesRefinement {
|
||||
top: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(Pixels(8.)))),
|
||||
left: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(Pixels(8.)))),
|
||||
right: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(Pixels(8.)))),
|
||||
bottom: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(Pixels(8.)))),
|
||||
top: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(8.)))),
|
||||
left: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(8.)))),
|
||||
right: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(8.)))),
|
||||
bottom: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(8.)))),
|
||||
},
|
||||
margin: EdgesRefinement {
|
||||
top: Some(Length::Definite(Pixels(8.).into())),
|
||||
left: Some(Length::Definite(Pixels(0.).into())),
|
||||
right: Some(Length::Definite(Pixels(0.).into())),
|
||||
bottom: Some(Length::Definite(Pixels(12.).into())),
|
||||
top: Some(Length::Definite(px(8.).into())),
|
||||
left: Some(Length::Definite(px(0.).into())),
|
||||
right: Some(Length::Definite(px(0.).into())),
|
||||
bottom: Some(Length::Definite(px(12.).into())),
|
||||
},
|
||||
border_style: Some(BorderStyle::Solid),
|
||||
border_widths: EdgesRefinement {
|
||||
top: Some(AbsoluteLength::Pixels(Pixels(1.))),
|
||||
left: Some(AbsoluteLength::Pixels(Pixels(1.))),
|
||||
right: Some(AbsoluteLength::Pixels(Pixels(1.))),
|
||||
bottom: Some(AbsoluteLength::Pixels(Pixels(1.))),
|
||||
top: Some(AbsoluteLength::Pixels(px(1.))),
|
||||
left: Some(AbsoluteLength::Pixels(px(1.))),
|
||||
right: Some(AbsoluteLength::Pixels(px(1.))),
|
||||
bottom: Some(AbsoluteLength::Pixels(px(1.))),
|
||||
},
|
||||
border_color: Some(colors.border_variant),
|
||||
background: Some(colors.editor_background.into()),
|
||||
@@ -6047,7 +6086,7 @@ pub(crate) mod tests {
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
workspace::init_settings(cx);
|
||||
ThemeSettings::register(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
EditorSettings::register(cx);
|
||||
prompt_store::init(cx)
|
||||
|
||||
@@ -15,6 +15,7 @@ use context_server::ContextServerId;
|
||||
use editor::{Editor, SelectionEffects, scroll::Autoscroll};
|
||||
use extension::ExtensionManifest;
|
||||
use extension_host::ExtensionStore;
|
||||
use feature_flags::{CodexAcpFeatureFlag, FeatureFlagAppExt as _};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
Action, AnyView, App, AsyncWindowContext, Corner, Entity, EventEmitter, FocusHandle, Focusable,
|
||||
@@ -26,7 +27,7 @@ use language_model::{
|
||||
};
|
||||
use notifications::status_toast::{StatusToast, ToastIcon};
|
||||
use project::{
|
||||
agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, GEMINI_NAME},
|
||||
agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
|
||||
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
|
||||
};
|
||||
use settings::{Settings, SettingsStore, update_settings_file};
|
||||
@@ -1014,7 +1015,9 @@ impl AgentConfiguration {
|
||||
.agent_server_store
|
||||
.read(cx)
|
||||
.external_agents()
|
||||
.filter(|name| name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME)
|
||||
.filter(|name| {
|
||||
name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME && name.0 != CODEX_NAME
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
@@ -1077,15 +1080,23 @@ impl AgentConfiguration {
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiGemini,
|
||||
"Gemini CLI",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiClaude,
|
||||
"Claude Code",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.when(cx.has_flag::<CodexAcpFeatureFlag>(), |this| {
|
||||
this
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiOpenAi,
|
||||
"Codex",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
})
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiGemini,
|
||||
"Gemini CLI",
|
||||
))
|
||||
.map(|mut parent| {
|
||||
for agent in user_defined_agents {
|
||||
parent = parent.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
|
||||
@@ -317,6 +317,8 @@ impl ManageProfilesModal {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> impl IntoElement + use<> {
|
||||
let is_focused = profile.navigation.focus_handle.contains_focused(window, cx);
|
||||
|
||||
div()
|
||||
.id(SharedString::from(format!("profile-{}", profile.id)))
|
||||
.track_focus(&profile.navigation.focus_handle)
|
||||
@@ -328,25 +330,27 @@ impl ManageProfilesModal {
|
||||
})
|
||||
.child(
|
||||
ListItem::new(SharedString::from(format!("profile-{}", profile.id)))
|
||||
.toggle_state(profile.navigation.focus_handle.contains_focused(window, cx))
|
||||
.toggle_state(is_focused)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.child(Label::new(profile.name.clone()))
|
||||
.end_slot(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Label::new("Customize")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.children(KeyBinding::for_action_in(
|
||||
&menu::Confirm,
|
||||
&self.focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)),
|
||||
)
|
||||
.when(is_focused, |this| {
|
||||
this.end_slot(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Label::new("Customize")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.children(KeyBinding::for_action_in(
|
||||
&menu::Confirm,
|
||||
&self.focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)),
|
||||
)
|
||||
})
|
||||
.on_click({
|
||||
let profile_id = profile.id.clone();
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
|
||||
@@ -562,10 +562,6 @@ impl Item for AgentDiffPane {
|
||||
self.editor.for_each_project_item(cx, f)
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _: &App) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn set_nav_history(
|
||||
&mut self,
|
||||
nav_history: ItemNavHistory,
|
||||
@@ -850,7 +846,7 @@ fn render_diff_hunk_controls(
|
||||
editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(window, cx);
|
||||
let position =
|
||||
hunk_range.end.to_point(&snapshot.buffer_snapshot);
|
||||
hunk_range.end.to_point(&snapshot.buffer_snapshot());
|
||||
editor.go_to_hunk_before_or_after_position(
|
||||
&snapshot,
|
||||
position,
|
||||
@@ -886,7 +882,7 @@ fn render_diff_hunk_controls(
|
||||
editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(window, cx);
|
||||
let point =
|
||||
hunk_range.start.to_point(&snapshot.buffer_snapshot);
|
||||
hunk_range.start.to_point(&snapshot.buffer_snapshot());
|
||||
editor.go_to_hunk_before_or_after_position(
|
||||
&snapshot,
|
||||
point,
|
||||
@@ -1818,7 +1814,6 @@ mod tests {
|
||||
use serde_json::json;
|
||||
use settings::{Settings, SettingsStore};
|
||||
use std::{path::Path, rc::Rc};
|
||||
use theme::ThemeSettings;
|
||||
use util::path;
|
||||
|
||||
#[gpui::test]
|
||||
@@ -1831,7 +1826,7 @@ mod tests {
|
||||
AgentSettings::register(cx);
|
||||
prompt_store::init(cx);
|
||||
workspace::init_settings(cx);
|
||||
ThemeSettings::register(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
EditorSettings::register(cx);
|
||||
language_model::init_settings(cx);
|
||||
});
|
||||
@@ -1983,7 +1978,7 @@ mod tests {
|
||||
AgentSettings::register(cx);
|
||||
prompt_store::init(cx);
|
||||
workspace::init_settings(cx);
|
||||
ThemeSettings::register(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
EditorSettings::register(cx);
|
||||
language_model::init_settings(cx);
|
||||
workspace::register_project_item::<Editor>(cx);
|
||||
|
||||
@@ -7,7 +7,7 @@ use acp_thread::AcpThread;
|
||||
use agent2::{DbThreadMetadata, HistoryEntry};
|
||||
use db::kvp::{Dismissable, KEY_VALUE_STORE};
|
||||
use project::agent_server_store::{
|
||||
AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, GEMINI_NAME,
|
||||
AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{
|
||||
@@ -48,8 +48,8 @@ use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
Action, AnyElement, App, AsyncWindowContext, Corner, DismissEvent, Entity, EventEmitter,
|
||||
ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, Subscription, Task, UpdateGlobal,
|
||||
WeakEntity, prelude::*,
|
||||
ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, ReadGlobal as _, Subscription, Task,
|
||||
UpdateGlobal, WeakEntity, prelude::*,
|
||||
};
|
||||
use language::LanguageRegistry;
|
||||
use language_model::{ConfigurationError, LanguageModelRegistry};
|
||||
@@ -75,6 +75,7 @@ use zed_actions::{
|
||||
assistant::{OpenRulesLibrary, ToggleFocus},
|
||||
};
|
||||
|
||||
use feature_flags::{CodexAcpFeatureFlag, FeatureFlagAppExt as _};
|
||||
const AGENT_PANEL_KEY: &str = "agent_panel";
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
@@ -216,6 +217,7 @@ pub enum AgentType {
|
||||
TextThread,
|
||||
Gemini,
|
||||
ClaudeCode,
|
||||
Codex,
|
||||
NativeAgent,
|
||||
Custom {
|
||||
name: SharedString,
|
||||
@@ -230,6 +232,7 @@ impl AgentType {
|
||||
Self::NativeAgent => "Agent 2".into(),
|
||||
Self::Gemini => "Gemini CLI".into(),
|
||||
Self::ClaudeCode => "Claude Code".into(),
|
||||
Self::Codex => "Codex".into(),
|
||||
Self::Custom { name, .. } => name.into(),
|
||||
}
|
||||
}
|
||||
@@ -239,6 +242,7 @@ impl AgentType {
|
||||
Self::Zed | Self::NativeAgent | Self::TextThread => None,
|
||||
Self::Gemini => Some(IconName::AiGemini),
|
||||
Self::ClaudeCode => Some(IconName::AiClaude),
|
||||
Self::Codex => Some(IconName::AiOpenAi),
|
||||
Self::Custom { .. } => Some(IconName::Terminal),
|
||||
}
|
||||
}
|
||||
@@ -249,6 +253,7 @@ impl From<ExternalAgent> for AgentType {
|
||||
match value {
|
||||
ExternalAgent::Gemini => Self::Gemini,
|
||||
ExternalAgent::ClaudeCode => Self::ClaudeCode,
|
||||
ExternalAgent::Codex => Self::Codex,
|
||||
ExternalAgent::Custom { name, command } => Self::Custom { name, command },
|
||||
ExternalAgent::NativeAgent => Self::NativeAgent,
|
||||
}
|
||||
@@ -514,6 +519,14 @@ impl AgentPanel {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
if SettingsStore::global(cx)
|
||||
.get::<DisableAiSettings>(None)
|
||||
.disable_ai
|
||||
{
|
||||
return panel;
|
||||
}
|
||||
|
||||
panel.as_mut(cx).loading = true;
|
||||
if let Some(serialized_panel) = serialized_panel {
|
||||
panel.update(cx, |panel, cx| {
|
||||
@@ -1103,15 +1116,15 @@ impl AgentPanel {
|
||||
WhichFontSize::AgentFont => {
|
||||
if persist {
|
||||
update_settings_file(self.fs.clone(), cx, move |settings, cx| {
|
||||
let agent_font_size =
|
||||
ThemeSettings::get_global(cx).agent_font_size(cx) + delta;
|
||||
let agent_ui_font_size =
|
||||
ThemeSettings::get_global(cx).agent_ui_font_size(cx) + delta;
|
||||
let _ = settings
|
||||
.theme
|
||||
.agent_font_size
|
||||
.insert(theme::clamp_font_size(agent_font_size).into());
|
||||
.agent_ui_font_size
|
||||
.insert(theme::clamp_font_size(agent_ui_font_size).into());
|
||||
});
|
||||
} else {
|
||||
theme::adjust_agent_font_size(cx, |size| size + delta);
|
||||
theme::adjust_agent_ui_font_size(cx, |size| size + delta);
|
||||
}
|
||||
}
|
||||
WhichFontSize::BufferFont => {
|
||||
@@ -1131,10 +1144,10 @@ impl AgentPanel {
|
||||
) {
|
||||
if action.persist {
|
||||
update_settings_file(self.fs.clone(), cx, move |settings, _| {
|
||||
settings.theme.agent_font_size = None;
|
||||
settings.theme.agent_ui_font_size = None;
|
||||
});
|
||||
} else {
|
||||
theme::reset_agent_font_size(cx);
|
||||
theme::reset_agent_ui_font_size(cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1427,6 +1440,11 @@ impl AgentPanel {
|
||||
cx,
|
||||
)
|
||||
}
|
||||
AgentType::Codex => {
|
||||
self.selected_agent = AgentType::Codex;
|
||||
self.serialize(cx);
|
||||
self.external_thread(Some(crate::ExternalAgent::Codex), None, None, window, cx)
|
||||
}
|
||||
AgentType::Custom { name, command } => self.external_thread(
|
||||
Some(crate::ExternalAgent::Custom { name, command }),
|
||||
None,
|
||||
@@ -1939,32 +1957,6 @@ impl AgentPanel {
|
||||
)
|
||||
.separator()
|
||||
.header("External Agents")
|
||||
.item(
|
||||
ContextMenuEntry::new("New Gemini CLI Thread")
|
||||
.icon(IconName::AiGemini)
|
||||
.icon_color(Color::Muted)
|
||||
.disabled(is_via_collab)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(panel) =
|
||||
workspace.panel::<AgentPanel>(cx)
|
||||
{
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Gemini,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
.item(
|
||||
ContextMenuEntry::new("New Claude Code Thread")
|
||||
.icon(IconName::AiClaude)
|
||||
@@ -1991,12 +1983,66 @@ impl AgentPanel {
|
||||
}
|
||||
}),
|
||||
)
|
||||
.when(cx.has_flag::<CodexAcpFeatureFlag>(), |this| {
|
||||
this.item(
|
||||
ContextMenuEntry::new("New Codex Thread")
|
||||
.icon(IconName::AiOpenAi)
|
||||
.disabled(is_via_collab)
|
||||
.icon_color(Color::Muted)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(panel) =
|
||||
workspace.panel::<AgentPanel>(cx)
|
||||
{
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Codex,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
})
|
||||
.item(
|
||||
ContextMenuEntry::new("New Gemini CLI Thread")
|
||||
.icon(IconName::AiGemini)
|
||||
.icon_color(Color::Muted)
|
||||
.disabled(is_via_collab)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(panel) =
|
||||
workspace.panel::<AgentPanel>(cx)
|
||||
{
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Gemini,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
.map(|mut menu| {
|
||||
let agent_names = agent_server_store
|
||||
.read(cx)
|
||||
.external_agents()
|
||||
.filter(|name| {
|
||||
name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME
|
||||
name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME && name.0 != CODEX_NAME
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
@@ -2532,7 +2578,7 @@ impl Render for AgentPanel {
|
||||
|
||||
match self.active_view.which_font_size_used() {
|
||||
WhichFontSize::AgentFont => {
|
||||
WithRemSize::new(ThemeSettings::get_global(cx).agent_font_size(cx))
|
||||
WithRemSize::new(ThemeSettings::get_global(cx).agent_ui_font_size(cx))
|
||||
.size_full()
|
||||
.child(content)
|
||||
.into_any()
|
||||
|
||||
@@ -167,6 +167,7 @@ enum ExternalAgent {
|
||||
#[default]
|
||||
Gemini,
|
||||
ClaudeCode,
|
||||
Codex,
|
||||
NativeAgent,
|
||||
Custom {
|
||||
name: SharedString,
|
||||
@@ -188,6 +189,7 @@ impl ExternalAgent {
|
||||
Self::NativeAgent => "zed",
|
||||
Self::Gemini => "gemini-cli",
|
||||
Self::ClaudeCode => "claude-code",
|
||||
Self::Codex => "codex",
|
||||
Self::Custom { .. } => "custom",
|
||||
}
|
||||
}
|
||||
@@ -200,6 +202,7 @@ impl ExternalAgent {
|
||||
match self {
|
||||
Self::Gemini => Rc::new(agent_servers::Gemini),
|
||||
Self::ClaudeCode => Rc::new(agent_servers::ClaudeCode),
|
||||
Self::Codex => Rc::new(agent_servers::Codex),
|
||||
Self::NativeAgent => Rc::new(agent2::NativeAgentServer::new(fs, history)),
|
||||
Self::Custom { name, command: _ } => {
|
||||
Rc::new(agent_servers::CustomAgentServer::new(name.clone()))
|
||||
|
||||
@@ -18,7 +18,9 @@ use agent_settings::AgentSettings;
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::telemetry::Telemetry;
|
||||
use collections::{HashMap, HashSet, VecDeque, hash_map};
|
||||
use editor::RowExt;
|
||||
use editor::SelectionEffects;
|
||||
use editor::scroll::ScrollOffset;
|
||||
use editor::{
|
||||
Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, ExcerptRange,
|
||||
MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
|
||||
@@ -380,7 +382,7 @@ impl InlineAssistant {
|
||||
if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) {
|
||||
for assist_id in &editor_assists.assist_ids {
|
||||
let assist = &self.assists[assist_id];
|
||||
let range = assist.range.to_point(&snapshot.buffer_snapshot);
|
||||
let range = assist.range.to_point(&snapshot.buffer_snapshot());
|
||||
if range.start.row <= newest_selection.start.row
|
||||
&& newest_selection.end.row <= range.end.row
|
||||
{
|
||||
@@ -400,16 +402,16 @@ impl InlineAssistant {
|
||||
selection.end.row -= 1;
|
||||
}
|
||||
selection.end.column = snapshot
|
||||
.buffer_snapshot
|
||||
.buffer_snapshot()
|
||||
.line_len(MultiBufferRow(selection.end.row));
|
||||
} else if let Some(fold) =
|
||||
snapshot.crease_for_buffer_row(MultiBufferRow(selection.end.row))
|
||||
{
|
||||
selection.start = fold.range().start;
|
||||
selection.end = fold.range().end;
|
||||
if MultiBufferRow(selection.end.row) < snapshot.buffer_snapshot.max_row() {
|
||||
if MultiBufferRow(selection.end.row) < snapshot.buffer_snapshot().max_row() {
|
||||
let chars = snapshot
|
||||
.buffer_snapshot
|
||||
.buffer_snapshot()
|
||||
.chars_at(Point::new(selection.end.row + 1, 0));
|
||||
|
||||
for c in chars {
|
||||
@@ -425,7 +427,7 @@ impl InlineAssistant {
|
||||
{
|
||||
selection.end.row += 1;
|
||||
selection.end.column = snapshot
|
||||
.buffer_snapshot
|
||||
.buffer_snapshot()
|
||||
.line_len(MultiBufferRow(selection.end.row));
|
||||
}
|
||||
}
|
||||
@@ -445,7 +447,7 @@ impl InlineAssistant {
|
||||
}
|
||||
selections.push(selection);
|
||||
}
|
||||
let snapshot = &snapshot.buffer_snapshot;
|
||||
let snapshot = &snapshot.buffer_snapshot();
|
||||
let newest_selection = newest_selection.unwrap();
|
||||
|
||||
let mut codegen_ranges = Vec::new();
|
||||
@@ -744,7 +746,7 @@ impl InlineAssistant {
|
||||
let scroll_bottom = scroll_top + editor.visible_line_count().unwrap_or(0.);
|
||||
editor_assists.scroll_lock = editor
|
||||
.row_for_block(decorations.prompt_block_id, cx)
|
||||
.map(|row| row.0 as f32)
|
||||
.map(|row| row.as_f64())
|
||||
.filter(|prompt_row| (scroll_top..scroll_bottom).contains(&prompt_row))
|
||||
.map(|prompt_row| InlineAssistScrollLock {
|
||||
assist_id,
|
||||
@@ -910,7 +912,9 @@ impl InlineAssistant {
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
let scroll_position = editor.scroll_position(cx);
|
||||
let target_scroll_top = editor.row_for_block(decorations.prompt_block_id, cx)?.0 as f32
|
||||
let target_scroll_top = editor
|
||||
.row_for_block(decorations.prompt_block_id, cx)?
|
||||
.as_f64()
|
||||
- scroll_lock.distance_from_top;
|
||||
if target_scroll_top != scroll_position.y {
|
||||
editor.set_scroll_position(point(scroll_position.x, target_scroll_top), window, cx);
|
||||
@@ -959,8 +963,9 @@ impl InlineAssistant {
|
||||
if let Some(decorations) = assist.decorations.as_ref() {
|
||||
let distance_from_top = editor.update(cx, |editor, cx| {
|
||||
let scroll_top = editor.scroll_position(cx).y;
|
||||
let prompt_row =
|
||||
editor.row_for_block(decorations.prompt_block_id, cx)?.0 as f32;
|
||||
let prompt_row = editor
|
||||
.row_for_block(decorations.prompt_block_id, cx)?
|
||||
.0 as ScrollOffset;
|
||||
Some(prompt_row - scroll_top)
|
||||
});
|
||||
|
||||
@@ -1192,8 +1197,8 @@ impl InlineAssistant {
|
||||
let mut scroll_target_range = None;
|
||||
if let Some(decorations) = assist.decorations.as_ref() {
|
||||
scroll_target_range = maybe!({
|
||||
let top = editor.row_for_block(decorations.prompt_block_id, cx)?.0 as f32;
|
||||
let bottom = editor.row_for_block(decorations.end_block_id, cx)?.0 as f32;
|
||||
let top = editor.row_for_block(decorations.prompt_block_id, cx)?.0 as f64;
|
||||
let bottom = editor.row_for_block(decorations.end_block_id, cx)?.0 as f64;
|
||||
Some((top, bottom))
|
||||
});
|
||||
if scroll_target_range.is_none() {
|
||||
@@ -1207,15 +1212,15 @@ impl InlineAssistant {
|
||||
.start
|
||||
.to_display_point(&snapshot.display_snapshot)
|
||||
.row();
|
||||
let top = start_row.0 as f32;
|
||||
let top = start_row.0 as ScrollOffset;
|
||||
let bottom = top + 1.0;
|
||||
(top, bottom)
|
||||
});
|
||||
let mut scroll_target_top = scroll_target_range.0;
|
||||
let mut scroll_target_bottom = scroll_target_range.1;
|
||||
|
||||
scroll_target_top -= editor.vertical_scroll_margin() as f32;
|
||||
scroll_target_bottom += editor.vertical_scroll_margin() as f32;
|
||||
scroll_target_top -= editor.vertical_scroll_margin() as ScrollOffset;
|
||||
scroll_target_bottom += editor.vertical_scroll_margin() as ScrollOffset;
|
||||
|
||||
let height_in_lines = editor.visible_line_count().unwrap_or(0.);
|
||||
let scroll_top = editor.scroll_position(cx).y;
|
||||
@@ -1543,7 +1548,7 @@ struct EditorInlineAssists {
|
||||
|
||||
struct InlineAssistScrollLock {
|
||||
assist_id: InlineAssistId,
|
||||
distance_from_top: f32,
|
||||
distance_from_top: ScrollOffset,
|
||||
}
|
||||
|
||||
impl EditorInlineAssists {
|
||||
|
||||
@@ -3,12 +3,20 @@ use agent_settings::{
|
||||
AgentProfile, AgentProfileId, AgentSettings, AvailableProfiles, builtin_profiles,
|
||||
};
|
||||
use fs::Fs;
|
||||
use gpui::{Action, Entity, FocusHandle, Subscription, prelude::*};
|
||||
use settings::{DockPosition, Settings as _, SettingsStore, update_settings_file};
|
||||
use std::sync::Arc;
|
||||
use fuzzy::{StringMatch, StringMatchCandidate, match_strings};
|
||||
use gpui::{
|
||||
Action, AnyElement, App, BackgroundExecutor, Context, DismissEvent, Entity, FocusHandle,
|
||||
Focusable, SharedString, Subscription, Task, Window,
|
||||
};
|
||||
use picker::{Picker, PickerDelegate, popover_menu::PickerPopoverMenu};
|
||||
use settings::{Settings as _, SettingsStore, update_settings_file};
|
||||
use std::{
|
||||
sync::atomic::Ordering,
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
use ui::{
|
||||
ContextMenu, ContextMenuEntry, DocumentationEdge, DocumentationSide, PopoverMenu,
|
||||
PopoverMenuHandle, TintColor, Tooltip, prelude::*,
|
||||
DocumentationAside, DocumentationEdge, DocumentationSide, HighlightedLabel, LabelSize,
|
||||
ListItem, ListItemSpacing, PopoverMenuHandle, TintColor, Tooltip, prelude::*,
|
||||
};
|
||||
|
||||
/// Trait for types that can provide and manage agent profiles
|
||||
@@ -25,9 +33,11 @@ pub trait ProfileProvider {
|
||||
|
||||
pub struct ProfileSelector {
|
||||
profiles: AvailableProfiles,
|
||||
pending_refresh: bool,
|
||||
fs: Arc<dyn Fs>,
|
||||
provider: Arc<dyn ProfileProvider>,
|
||||
menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
picker: Option<Entity<Picker<ProfilePickerDelegate>>>,
|
||||
picker_handle: PopoverMenuHandle<Picker<ProfilePickerDelegate>>,
|
||||
focus_handle: FocusHandle,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
@@ -40,125 +50,91 @@ impl ProfileSelector {
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let settings_subscription = cx.observe_global::<SettingsStore>(move |this, cx| {
|
||||
this.refresh_profiles(cx);
|
||||
this.pending_refresh = true;
|
||||
cx.notify();
|
||||
});
|
||||
|
||||
Self {
|
||||
profiles: AgentProfile::available_profiles(cx),
|
||||
pending_refresh: false,
|
||||
fs,
|
||||
provider,
|
||||
menu_handle: PopoverMenuHandle::default(),
|
||||
picker: None,
|
||||
picker_handle: PopoverMenuHandle::default(),
|
||||
focus_handle,
|
||||
_subscriptions: vec![settings_subscription],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn menu_handle(&self) -> PopoverMenuHandle<ContextMenu> {
|
||||
self.menu_handle.clone()
|
||||
pub fn menu_handle(&self) -> PopoverMenuHandle<Picker<ProfilePickerDelegate>> {
|
||||
self.picker_handle.clone()
|
||||
}
|
||||
|
||||
fn refresh_profiles(&mut self, cx: &mut Context<Self>) {
|
||||
self.profiles = AgentProfile::available_profiles(cx);
|
||||
}
|
||||
|
||||
fn build_context_menu(
|
||||
&self,
|
||||
fn ensure_picker(
|
||||
&mut self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Entity<ContextMenu> {
|
||||
ContextMenu::build(window, cx, |mut menu, _window, cx| {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
) -> Entity<Picker<ProfilePickerDelegate>> {
|
||||
if self.picker.is_none() {
|
||||
let delegate = ProfilePickerDelegate::new(
|
||||
self.fs.clone(),
|
||||
self.provider.clone(),
|
||||
self.profiles.clone(),
|
||||
cx.background_executor().clone(),
|
||||
cx,
|
||||
);
|
||||
|
||||
let mut found_non_builtin = false;
|
||||
for (profile_id, profile_name) in self.profiles.iter() {
|
||||
if !builtin_profiles::is_builtin(profile_id) {
|
||||
found_non_builtin = true;
|
||||
continue;
|
||||
}
|
||||
menu = menu.item(self.menu_entry_for_profile(
|
||||
profile_id.clone(),
|
||||
profile_name,
|
||||
settings,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
let picker = cx.new(|cx| {
|
||||
Picker::list(delegate, window, cx)
|
||||
.show_scrollbar(true)
|
||||
.width(rems(18.))
|
||||
.max_height(Some(rems(20.).into()))
|
||||
});
|
||||
|
||||
if found_non_builtin {
|
||||
menu = menu.separator().header("Custom Profiles");
|
||||
for (profile_id, profile_name) in self.profiles.iter() {
|
||||
if builtin_profiles::is_builtin(profile_id) {
|
||||
continue;
|
||||
}
|
||||
menu = menu.item(self.menu_entry_for_profile(
|
||||
profile_id.clone(),
|
||||
profile_name,
|
||||
settings,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
}
|
||||
self.picker = Some(picker);
|
||||
}
|
||||
|
||||
menu = menu.separator();
|
||||
menu = menu.item(ContextMenuEntry::new("Configure Profiles…").handler(
|
||||
move |window, cx| {
|
||||
window.dispatch_action(ManageProfiles::default().boxed_clone(), cx);
|
||||
},
|
||||
));
|
||||
|
||||
menu
|
||||
})
|
||||
}
|
||||
|
||||
fn menu_entry_for_profile(
|
||||
&self,
|
||||
profile_id: AgentProfileId,
|
||||
profile_name: &SharedString,
|
||||
settings: &AgentSettings,
|
||||
cx: &App,
|
||||
) -> ContextMenuEntry {
|
||||
let documentation = match profile_name.to_lowercase().as_str() {
|
||||
builtin_profiles::WRITE => Some("Get help to write anything."),
|
||||
builtin_profiles::ASK => Some("Chat about your codebase."),
|
||||
builtin_profiles::MINIMAL => Some("Chat about anything with no tools."),
|
||||
_ => None,
|
||||
};
|
||||
let thread_profile_id = self.provider.profile_id(cx);
|
||||
|
||||
let entry = ContextMenuEntry::new(profile_name.clone())
|
||||
.toggleable(IconPosition::End, profile_id == thread_profile_id);
|
||||
|
||||
let entry = if let Some(doc_text) = documentation {
|
||||
entry.documentation_aside(
|
||||
documentation_side(settings.dock),
|
||||
DocumentationEdge::Top,
|
||||
move |_| Label::new(doc_text).into_any_element(),
|
||||
)
|
||||
} else {
|
||||
entry
|
||||
};
|
||||
|
||||
entry.handler({
|
||||
let fs = self.fs.clone();
|
||||
let provider = self.provider.clone();
|
||||
move |_window, cx| {
|
||||
update_settings_file(fs.clone(), cx, {
|
||||
let profile_id = profile_id.clone();
|
||||
move |settings, _cx| {
|
||||
settings
|
||||
.agent
|
||||
.get_or_insert_default()
|
||||
.set_profile(profile_id.0);
|
||||
}
|
||||
if self.pending_refresh {
|
||||
if let Some(picker) = &self.picker {
|
||||
let profiles = AgentProfile::available_profiles(cx);
|
||||
self.profiles = profiles.clone();
|
||||
picker.update(cx, |picker, cx| {
|
||||
let query = picker.query(cx);
|
||||
picker
|
||||
.delegate
|
||||
.refresh_profiles(profiles.clone(), query, cx);
|
||||
});
|
||||
|
||||
provider.set_profile(profile_id.clone(), cx);
|
||||
}
|
||||
})
|
||||
self.pending_refresh = false;
|
||||
}
|
||||
|
||||
self.picker.as_ref().unwrap().clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl Focusable for ProfileSelector {
|
||||
fn focus_handle(&self, cx: &App) -> FocusHandle {
|
||||
if let Some(picker) = &self.picker {
|
||||
picker.focus_handle(cx)
|
||||
} else {
|
||||
self.focus_handle.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ProfileSelector {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
if !self.provider.profiles_supported(cx) {
|
||||
return Button::new("tools-not-supported-button", "Tools Unsupported")
|
||||
.disabled(true)
|
||||
.label_size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.tooltip(Tooltip::text("This model does not support tools."))
|
||||
.into_any_element();
|
||||
}
|
||||
|
||||
let picker = self.ensure_picker(window, cx);
|
||||
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
let profile_id = self.provider.profile_id(cx);
|
||||
let profile = settings.profiles.get(&profile_id);
|
||||
@@ -166,62 +142,594 @@ impl Render for ProfileSelector {
|
||||
let selected_profile = profile
|
||||
.map(|profile| profile.name.clone())
|
||||
.unwrap_or_else(|| "Unknown".into());
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
|
||||
if self.provider.profiles_supported(cx) {
|
||||
let this = cx.entity();
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
let trigger_button = Button::new("profile-selector-model", selected_profile)
|
||||
.label_size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.icon(IconName::ChevronDown)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_position(IconPosition::End)
|
||||
.icon_color(Color::Muted)
|
||||
.selected_style(ButtonStyle::Tinted(TintColor::Accent));
|
||||
let trigger_button = Button::new("profile-selector", selected_profile)
|
||||
.label_size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.icon(IconName::ChevronDown)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_position(IconPosition::End)
|
||||
.icon_color(Color::Muted)
|
||||
.selected_style(ButtonStyle::Tinted(TintColor::Accent));
|
||||
|
||||
PopoverMenu::new("profile-selector")
|
||||
.trigger_with_tooltip(trigger_button, {
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Toggle Profile Menu",
|
||||
&ToggleProfileSelector,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
.anchor(
|
||||
if documentation_side(settings.dock) == DocumentationSide::Left {
|
||||
gpui::Corner::BottomRight
|
||||
} else {
|
||||
gpui::Corner::BottomLeft
|
||||
},
|
||||
PickerPopoverMenu::new(
|
||||
picker,
|
||||
trigger_button,
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Toggle Profile Menu",
|
||||
&ToggleProfileSelector,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.with_handle(self.menu_handle.clone())
|
||||
.menu(move |window, cx| {
|
||||
Some(this.update(cx, |this, cx| this.build_context_menu(window, cx)))
|
||||
})
|
||||
.offset(gpui::Point {
|
||||
x: px(0.0),
|
||||
y: px(-2.0),
|
||||
})
|
||||
.into_any_element()
|
||||
},
|
||||
gpui::Corner::BottomRight,
|
||||
cx,
|
||||
)
|
||||
.with_handle(self.picker_handle.clone())
|
||||
.render(window, cx)
|
||||
.into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ProfileCandidate {
|
||||
id: AgentProfileId,
|
||||
name: SharedString,
|
||||
is_builtin: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ProfileMatchEntry {
|
||||
candidate_index: usize,
|
||||
positions: Vec<usize>,
|
||||
}
|
||||
|
||||
enum ProfilePickerEntry {
|
||||
Header(SharedString),
|
||||
Profile(ProfileMatchEntry),
|
||||
}
|
||||
|
||||
pub(crate) struct ProfilePickerDelegate {
|
||||
fs: Arc<dyn Fs>,
|
||||
provider: Arc<dyn ProfileProvider>,
|
||||
background: BackgroundExecutor,
|
||||
candidates: Vec<ProfileCandidate>,
|
||||
string_candidates: Arc<Vec<StringMatchCandidate>>,
|
||||
filtered_entries: Vec<ProfilePickerEntry>,
|
||||
selected_index: usize,
|
||||
query: String,
|
||||
cancel: Option<Arc<AtomicBool>>,
|
||||
}
|
||||
|
||||
impl ProfilePickerDelegate {
|
||||
fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
provider: Arc<dyn ProfileProvider>,
|
||||
profiles: AvailableProfiles,
|
||||
background: BackgroundExecutor,
|
||||
cx: &mut Context<ProfileSelector>,
|
||||
) -> Self {
|
||||
let candidates = Self::candidates_from(profiles);
|
||||
let string_candidates = Arc::new(Self::string_candidates(&candidates));
|
||||
let filtered_entries = Self::entries_from_candidates(&candidates);
|
||||
|
||||
let mut this = Self {
|
||||
fs,
|
||||
provider,
|
||||
background,
|
||||
candidates,
|
||||
string_candidates,
|
||||
filtered_entries,
|
||||
selected_index: 0,
|
||||
query: String::new(),
|
||||
cancel: None,
|
||||
};
|
||||
|
||||
this.selected_index = this
|
||||
.index_of_profile(&this.provider.profile_id(cx))
|
||||
.unwrap_or_else(|| this.first_selectable_index().unwrap_or(0));
|
||||
|
||||
this
|
||||
}
|
||||
|
||||
fn refresh_profiles(
|
||||
&mut self,
|
||||
profiles: AvailableProfiles,
|
||||
query: String,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) {
|
||||
self.candidates = Self::candidates_from(profiles);
|
||||
self.string_candidates = Arc::new(Self::string_candidates(&self.candidates));
|
||||
self.query = query;
|
||||
|
||||
if self.query.is_empty() {
|
||||
self.filtered_entries = Self::entries_from_candidates(&self.candidates);
|
||||
} else {
|
||||
Button::new("tools-not-supported-button", "Tools Unsupported")
|
||||
.disabled(true)
|
||||
.label_size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.tooltip(Tooltip::text("This model does not support tools."))
|
||||
.into_any_element()
|
||||
let matches = self.search_blocking(&self.query);
|
||||
self.filtered_entries = self.entries_from_matches(matches);
|
||||
}
|
||||
|
||||
self.selected_index = self
|
||||
.index_of_profile(&self.provider.profile_id(cx))
|
||||
.unwrap_or_else(|| self.first_selectable_index().unwrap_or(0));
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn candidates_from(profiles: AvailableProfiles) -> Vec<ProfileCandidate> {
|
||||
profiles
|
||||
.into_iter()
|
||||
.map(|(id, name)| ProfileCandidate {
|
||||
is_builtin: builtin_profiles::is_builtin(&id),
|
||||
id,
|
||||
name,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn string_candidates(candidates: &[ProfileCandidate]) -> Vec<StringMatchCandidate> {
|
||||
candidates
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, candidate)| StringMatchCandidate::new(index, candidate.name.as_ref()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn documentation(candidate: &ProfileCandidate) -> Option<&'static str> {
|
||||
match candidate.id.as_str() {
|
||||
builtin_profiles::WRITE => Some("Get help to write anything."),
|
||||
builtin_profiles::ASK => Some("Chat about your codebase."),
|
||||
builtin_profiles::MINIMAL => Some("Chat about anything with no tools."),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn entries_from_candidates(candidates: &[ProfileCandidate]) -> Vec<ProfilePickerEntry> {
|
||||
let mut entries = Vec::new();
|
||||
let mut inserted_custom_header = false;
|
||||
|
||||
for (idx, candidate) in candidates.iter().enumerate() {
|
||||
if !candidate.is_builtin && !inserted_custom_header {
|
||||
if !entries.is_empty() {
|
||||
entries.push(ProfilePickerEntry::Header("Custom Profiles".into()));
|
||||
}
|
||||
inserted_custom_header = true;
|
||||
}
|
||||
|
||||
entries.push(ProfilePickerEntry::Profile(ProfileMatchEntry {
|
||||
candidate_index: idx,
|
||||
positions: Vec::new(),
|
||||
}));
|
||||
}
|
||||
|
||||
entries
|
||||
}
|
||||
|
||||
fn entries_from_matches(&self, matches: Vec<StringMatch>) -> Vec<ProfilePickerEntry> {
|
||||
let mut entries = Vec::new();
|
||||
for mat in matches {
|
||||
if self.candidates.get(mat.candidate_id).is_some() {
|
||||
entries.push(ProfilePickerEntry::Profile(ProfileMatchEntry {
|
||||
candidate_index: mat.candidate_id,
|
||||
positions: mat.positions,
|
||||
}));
|
||||
}
|
||||
}
|
||||
entries
|
||||
}
|
||||
|
||||
fn first_selectable_index(&self) -> Option<usize> {
|
||||
self.filtered_entries
|
||||
.iter()
|
||||
.position(|entry| matches!(entry, ProfilePickerEntry::Profile(_)))
|
||||
}
|
||||
|
||||
fn index_of_profile(&self, profile_id: &AgentProfileId) -> Option<usize> {
|
||||
self.filtered_entries.iter().position(|entry| {
|
||||
matches!(entry, ProfilePickerEntry::Profile(profile) if self
|
||||
.candidates
|
||||
.get(profile.candidate_index)
|
||||
.map(|candidate| &candidate.id == profile_id)
|
||||
.unwrap_or(false))
|
||||
})
|
||||
}
|
||||
|
||||
fn search_blocking(&self, query: &str) -> Vec<StringMatch> {
|
||||
if query.is_empty() {
|
||||
return self
|
||||
.string_candidates
|
||||
.iter()
|
||||
.map(|candidate| StringMatch {
|
||||
candidate_id: candidate.id,
|
||||
score: 0.0,
|
||||
positions: Vec::new(),
|
||||
string: candidate.string.clone(),
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
|
||||
let cancel_flag = AtomicBool::new(false);
|
||||
|
||||
self.background.block(match_strings(
|
||||
self.string_candidates.as_ref(),
|
||||
query,
|
||||
false,
|
||||
true,
|
||||
100,
|
||||
&cancel_flag,
|
||||
self.background.clone(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
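For orientation, here is a minimal standalone sketch of the same fuzzy-matching call that `search_blocking` makes above. The helper name is illustrative, and the boolean flags and result cap simply mirror the arguments used by the picker.

// Illustrative helper only; mirrors the call shape used by search_blocking above.
fn matches_for(
    query: &str,
    candidates: &[StringMatchCandidate],
    executor: &BackgroundExecutor,
) -> Vec<StringMatch> {
    let cancel_flag = AtomicBool::new(false);
    executor.block(match_strings(
        candidates,
        query,
        false, // same flags as the picker passes above
        true,
        100, // cap on returned matches, matching the picker
        &cancel_flag,
        executor.clone(),
    ))
}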
impl PickerDelegate for ProfilePickerDelegate {
type ListItem = AnyElement;

fn placeholder_text(&self, _: &mut Window, _: &mut App) -> Arc<str> {
"Search profiles…".into()
}

fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
let text = if self.candidates.is_empty() {
"No profiles.".into()
} else {
"No profiles match your search.".into()
};
Some(text)
}

fn match_count(&self) -> usize {
self.filtered_entries.len()
}

fn selected_index(&self) -> usize {
self.selected_index
}

fn set_selected_index(&mut self, ix: usize, _: &mut Window, cx: &mut Context<Picker<Self>>) {
self.selected_index = ix.min(self.filtered_entries.len().saturating_sub(1));
cx.notify();
}

fn can_select(
&mut self,
ix: usize,
_window: &mut Window,
_cx: &mut Context<Picker<Self>>,
) -> bool {
match self.filtered_entries.get(ix) {
Some(ProfilePickerEntry::Profile(_)) => true,
Some(ProfilePickerEntry::Header(_)) | None => false,
}
}

fn update_matches(
&mut self,
query: String,
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Task<()> {
if query.is_empty() {
self.query.clear();
self.filtered_entries = Self::entries_from_candidates(&self.candidates);
self.selected_index = self
.index_of_profile(&self.provider.profile_id(cx))
.unwrap_or_else(|| self.first_selectable_index().unwrap_or(0));
cx.notify();
return Task::ready(());
}

if let Some(prev) = &self.cancel {
prev.store(true, Ordering::Relaxed);
}
let cancel = Arc::new(AtomicBool::new(false));
self.cancel = Some(cancel.clone());

let string_candidates = self.string_candidates.clone();
let background = self.background.clone();
let provider = self.provider.clone();
self.query = query.clone();

let cancel_for_future = cancel;

cx.spawn_in(window, async move |this, cx| {
let matches = match_strings(
string_candidates.as_ref(),
&query,
false,
true,
100,
cancel_for_future.as_ref(),
background,
)
.await;

this.update_in(cx, |this, _, cx| {
if this.delegate.query != query {
return;
}

this.delegate.filtered_entries = this.delegate.entries_from_matches(matches);
this.delegate.selected_index = this
.delegate
.index_of_profile(&provider.profile_id(cx))
.unwrap_or_else(|| this.delegate.first_selectable_index().unwrap_or(0));
cx.notify();
})
.ok();
})
}

fn confirm(&mut self, _: bool, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
match self.filtered_entries.get(self.selected_index) {
Some(ProfilePickerEntry::Profile(entry)) => {
if let Some(candidate) = self.candidates.get(entry.candidate_index) {
let profile_id = candidate.id.clone();
let fs = self.fs.clone();
let provider = self.provider.clone();

update_settings_file(fs, cx, {
let profile_id = profile_id.clone();
move |settings, _cx| {
settings
.agent
.get_or_insert_default()
.set_profile(profile_id.0);
}
});

provider.set_profile(profile_id.clone(), cx);

telemetry::event!(
"agent_profile_switched",
profile_id = profile_id.as_str(),
source = "picker"
);
}

cx.emit(DismissEvent);
}
_ => {}
}
}

fn dismissed(&mut self, window: &mut Window, cx: &mut Context<Picker<Self>>) {
cx.defer_in(window, |picker, window, cx| {
picker.set_query("", window, cx);
});
cx.emit(DismissEvent);
}

fn render_match(
&self,
ix: usize,
selected: bool,
_: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<Self::ListItem> {
match self.filtered_entries.get(ix)? {
ProfilePickerEntry::Header(label) => Some(
div()
.px_2p5()
.pb_0p5()
.when(ix > 0, |this| {
this.mt_1p5()
.pt_2()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
})
.child(
Label::new(label.clone())
.size(LabelSize::XSmall)
.color(Color::Muted),
)
.into_any_element(),
),
ProfilePickerEntry::Profile(entry) => {
let candidate = self.candidates.get(entry.candidate_index)?;
let active_id = self.provider.profile_id(cx);
let is_active = active_id == candidate.id;

Some(
ListItem::new(SharedString::from(candidate.id.0.clone()))
.inset(true)
.spacing(ListItemSpacing::Sparse)
.toggle_state(selected)
.child(HighlightedLabel::new(
candidate.name.clone(),
entry.positions.clone(),
))
.when(is_active, |this| {
this.end_slot(
div()
.pr_2()
.child(Icon::new(IconName::Check).color(Color::Accent)),
)
})
.into_any_element(),
)
}
}
}

fn documentation_aside(
&self,
_window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<DocumentationAside> {
use std::rc::Rc;

let entry = match self.filtered_entries.get(self.selected_index)? {
ProfilePickerEntry::Profile(entry) => entry,
ProfilePickerEntry::Header(_) => return None,
};

let candidate = self.candidates.get(entry.candidate_index)?;
let docs_aside = Self::documentation(candidate)?.to_string();

let settings = AgentSettings::get_global(cx);
let side = match settings.dock {
settings::DockPosition::Left => DocumentationSide::Right,
settings::DockPosition::Bottom | settings::DockPosition::Right => {
DocumentationSide::Left
}
};

Some(DocumentationAside {
side,
edge: DocumentationEdge::Top,
render: Rc::new(move |_| Label::new(docs_aside.clone()).into_any_element()),
})
}

fn render_footer(
&self,
_: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<gpui::AnyElement> {
Some(
h_flex()
.w_full()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.p_1()
.gap_4()
.justify_between()
.child(
Button::new("configure", "Configure")
.icon(IconName::Settings)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(|_, window, cx| {
window.dispatch_action(ManageProfiles::default().boxed_clone(), cx);
}),
)
.into_any(),
)
}
}
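The `update_matches` implementation above leans on a small cancel-previous-search idiom. A self-contained sketch of just that idiom, with illustrative names, is:

use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};

// Each new query flips the previous flag so any in-flight match task can bail out early.
struct SearchState {
    cancel: Option<Arc<AtomicBool>>,
}

impl SearchState {
    fn begin_search(&mut self) -> Arc<AtomicBool> {
        if let Some(previous) = &self.cancel {
            previous.store(true, Ordering::Relaxed);
        }
        let flag = Arc::new(AtomicBool::new(false));
        self.cancel = Some(flag.clone());
        flag
    }
}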
#[cfg(test)]
mod tests {
use super::*;
use fs::FakeFs;
use gpui::TestAppContext;

#[gpui::test]
fn entries_include_custom_profiles(_cx: &mut TestAppContext) {
let candidates = vec![
ProfileCandidate {
id: AgentProfileId("write".into()),
name: SharedString::from("Write"),
is_builtin: true,
},
ProfileCandidate {
id: AgentProfileId("my-custom".into()),
name: SharedString::from("My Custom"),
is_builtin: false,
},
];

let entries = ProfilePickerDelegate::entries_from_candidates(&candidates);

assert!(entries.iter().any(|entry| matches!(
entry,
ProfilePickerEntry::Profile(profile)
if candidates[profile.candidate_index].id.as_str() == "my-custom"
)));
assert!(entries.iter().any(|entry| matches!(
entry,
ProfilePickerEntry::Header(label) if label.as_ref() == "Custom Profiles"
)));
}

#[gpui::test]
fn fuzzy_filter_returns_no_results_and_keeps_configure(cx: &mut TestAppContext) {
let candidates = vec![ProfileCandidate {
id: AgentProfileId("write".into()),
name: SharedString::from("Write"),
is_builtin: true,
}];

let delegate = ProfilePickerDelegate {
fs: FakeFs::new(cx.executor()),
provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
background: cx.executor(),
candidates,
string_candidates: Arc::new(Vec::new()),
filtered_entries: Vec::new(),
selected_index: 0,
query: String::new(),
cancel: None,
};

let matches = Vec::new(); // No matches
let _entries = delegate.entries_from_matches(matches);
}

#[gpui::test]
fn active_profile_selection_logic_works(cx: &mut TestAppContext) {
let candidates = vec![
ProfileCandidate {
id: AgentProfileId("write".into()),
name: SharedString::from("Write"),
is_builtin: true,
},
ProfileCandidate {
id: AgentProfileId("ask".into()),
name: SharedString::from("Ask"),
is_builtin: true,
},
];

let delegate = ProfilePickerDelegate {
fs: FakeFs::new(cx.executor()),
provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
background: cx.executor(),
candidates,
string_candidates: Arc::new(Vec::new()),
filtered_entries: vec![
ProfilePickerEntry::Profile(ProfileMatchEntry {
candidate_index: 0,
positions: Vec::new(),
}),
ProfilePickerEntry::Profile(ProfileMatchEntry {
candidate_index: 1,
positions: Vec::new(),
}),
],
selected_index: 0,
query: String::new(),
cancel: None,
};

// Active profile should be found at index 0
let active_index = delegate.index_of_profile(&AgentProfileId("write".into()));
assert_eq!(active_index, Some(0));
}

struct TestProfileProvider {
profile_id: AgentProfileId,
}

impl TestProfileProvider {
fn new(profile_id: AgentProfileId) -> Self {
Self { profile_id }
}
}

impl ProfileProvider for TestProfileProvider {
fn profile_id(&self, _cx: &App) -> AgentProfileId {
self.profile_id.clone()
}

fn set_profile(&self, _profile_id: AgentProfileId, _cx: &mut App) {}

fn profiles_supported(&self, _cx: &App) -> bool {
true
}
}
}

fn documentation_side(position: DockPosition) -> DocumentationSide {
match position {
DockPosition::Left => DocumentationSide::Right,
DockPosition::Bottom => DocumentationSide::Left,
DockPosition::Right => DocumentationSide::Left,
}
}

@@ -17,6 +17,7 @@ use editor::{
BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata, CustomBlockId, FoldId,
RenderBlock, ToDisplayPoint,
},
scroll::ScrollOffset,
};
use editor::{FoldPlaceholder, display_map::CreaseId};
use fs::Fs;
@@ -108,7 +109,7 @@ pub enum InsertDraggedFiles {

#[derive(Copy, Clone, Debug, PartialEq)]
struct ScrollPosition {
offset_before_cursor: gpui::Point<f32>,
offset_before_cursor: gpui::Point<ScrollOffset>,
cursor: Anchor,
}

@@ -631,7 +632,7 @@ impl TextThreadEditor {
let snapshot = editor.snapshot(window, cx);
let cursor_point = scroll_position.cursor.to_display_point(&snapshot);
let scroll_top =
cursor_point.row().as_f32() - scroll_position.offset_before_cursor.y;
cursor_point.row().as_f64() - scroll_position.offset_before_cursor.y;
editor.set_scroll_position(
point(scroll_position.offset_before_cursor.x, scroll_top),
window,
@@ -979,7 +980,7 @@ impl TextThreadEditor {
let cursor_row = cursor
.to_display_point(&snapshot.display_snapshot)
.row()
.as_f32();
.as_f64();
let scroll_position = editor
.scroll_manager
.anchor()

@@ -48,7 +48,7 @@ impl Render for BurnModeTooltip {
let keybinding = KeyBinding::for_action(&ToggleBurnMode, window, cx)
.map(|kb| kb.size(rems_from_px(12.)));

tooltip_container(window, cx, |this, _, _| {
tooltip_container(cx, |this, _| {
this
.child(
h_flex()

@@ -704,7 +704,7 @@ impl ContextPillHover {

impl Render for ContextPillHover {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
tooltip_container(window, cx, move |this, window, cx| {
tooltip_container(cx, move |this, cx| {
this.occlude()
.on_mouse_move(|_, _, cx| cx.stop_propagation())
.on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation())

@@ -12,8 +12,8 @@ impl UnavailableEditingTooltip {
}

impl Render for UnavailableEditingTooltip {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
tooltip_container(window, cx, |this, _, _| {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
tooltip_container(cx, |this, _| {
this.child(Label::new("Unavailable Editing")).child(
div().max_w_64().child(
Label::new(format!(

@@ -16,8 +16,8 @@ anyhow.workspace = true
futures.workspace = true
gpui.workspace = true
net.workspace = true
proto.workspace = true
smol.workspace = true
log.workspace = true
tempfile.workspace = true
util.workspace = true
workspace-hack.workspace = true
@@ -25,3 +25,6 @@ zeroize.workspace = true

[target.'cfg(target_os = "windows")'.dependencies]
windows.workspace = true

[package.metadata.cargo-machete]
ignored = ["log"]

@@ -1,10 +1,16 @@
mod encrypted_password;

pub use encrypted_password::{EncryptedPassword, ProcessExt};
pub use encrypted_password::{EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};

#[cfg(target_os = "windows")]
use net::async_net::UnixListener;
use smol::lock::Mutex;
use util::fs::make_file_executable;

use std::ffi::OsStr;
use std::ops::ControlFlow;
use std::sync::Arc;
use std::sync::OnceLock;
use std::{ffi::OsStr, time::Duration};
use std::time::Duration;

use anyhow::{Context as _, Result};
use futures::channel::{mpsc, oneshot};
@@ -14,9 +20,13 @@ use futures::{
};
use gpui::{AsyncApp, BackgroundExecutor, Task};
use smol::fs;
use util::ResultExt as _;
use util::{ResultExt as _, debug_panic, maybe, paths::PathExt};

use crate::encrypted_password::decrypt;
/// Path to the program used for askpass
///
/// On Unix and remote servers, this defaults to the current executable
/// On Windows, this is set to the CLI variant of zed
static ASKPASS_PROGRAM: OnceLock<std::path::PathBuf> = OnceLock::new();

#[derive(PartialEq, Eq)]
pub enum AskPassResult {
@@ -26,6 +36,7 @@ pub enum AskPassResult {

pub struct AskPassDelegate {
tx: mpsc::UnboundedSender<(String, oneshot::Sender<EncryptedPassword>)>,
executor: BackgroundExecutor,
_task: Task<()>,
}

@@ -43,24 +54,27 @@ impl AskPassDelegate {
password_prompt(prompt, channel, cx);
}
});
Self { tx, _task: task }
Self {
tx,
_task: task,
executor: cx.background_executor().clone(),
}
}

pub async fn ask_password(&mut self, prompt: String) -> Result<EncryptedPassword> {
let (tx, rx) = oneshot::channel();
self.tx.send((prompt, tx)).await?;
Ok(rx.await?)
pub fn ask_password(&mut self, prompt: String) -> Task<Option<EncryptedPassword>> {
let mut this_tx = self.tx.clone();
self.executor.spawn(async move {
let (tx, rx) = oneshot::channel();
this_tx.send((prompt, tx)).await.ok()?;
rx.await.ok()
})
}
}
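A rough sketch of driving the Task-returning `ask_password` shown above; based on the `.ok()` conversions in the new code, the returned task is assumed to resolve to `None` when the prompt channel is dropped before the user answers.

async fn prompt_once(delegate: &mut AskPassDelegate) -> Option<EncryptedPassword> {
    // ask_password now hands back a Task instead of being an async fn.
    let answer = delegate.ask_password("Enter SSH passphrase:".to_string());
    answer.await
}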
pub struct AskPassSession {
#[cfg(not(target_os = "windows"))]
script_path: std::path::PathBuf,
#[cfg(target_os = "windows")]
askpass_helper: String,
#[cfg(target_os = "windows")]
secret: std::sync::Arc<OnceLock<EncryptedPassword>>,
_askpass_task: Task<()>,
askpass_task: PasswordProxy,
askpass_opened_rx: Option<oneshot::Receiver<()>>,
askpass_kill_master_rx: Option<oneshot::Receiver<()>>,
}
@@ -75,104 +89,57 @@ impl AskPassSession {
/// You must retain this session until the master process exits.
#[must_use]
pub async fn new(executor: &BackgroundExecutor, mut delegate: AskPassDelegate) -> Result<Self> {
use net::async_net::UnixListener;
use util::fs::make_file_executable;

#[cfg(target_os = "windows")]
let secret = std::sync::Arc::new(OnceLock::new());
let temp_dir = tempfile::Builder::new().prefix("zed-askpass").tempdir()?;
let askpass_socket = temp_dir.path().join("askpass.sock");
let askpass_script_path = temp_dir.path().join(ASKPASS_SCRIPT_NAME);
let (askpass_opened_tx, askpass_opened_rx) = oneshot::channel::<()>();
let listener = UnixListener::bind(&askpass_socket).context("creating askpass socket")?;
#[cfg(not(target_os = "windows"))]
let zed_path = util::get_shell_safe_zed_path()?;
#[cfg(target_os = "windows")]
let zed_path = std::env::current_exe()
.context("finding current executable path for use in askpass")?;

let askpass_opened_tx = Arc::new(Mutex::new(Some(askpass_opened_tx)));

let (askpass_kill_master_tx, askpass_kill_master_rx) = oneshot::channel::<()>();
let mut kill_tx = Some(askpass_kill_master_tx);
let kill_tx = Arc::new(Mutex::new(Some(askpass_kill_master_tx)));

#[cfg(target_os = "windows")]
let askpass_secret = secret.clone();
let askpass_task = executor.spawn(async move {
let mut askpass_opened_tx = Some(askpass_opened_tx);
let get_password = {
let executor = executor.clone();

while let Ok((mut stream, _)) = listener.accept().await {
if let Some(askpass_opened_tx) = askpass_opened_tx.take() {
askpass_opened_tx.send(()).ok();
}
let mut buffer = Vec::new();
let mut reader = BufReader::new(&mut stream);
if reader.read_until(b'\0', &mut buffer).await.is_err() {
buffer.clear();
}
let prompt = String::from_utf8_lossy(&buffer);
if let Some(password) = delegate
.ask_password(prompt.to_string())
.await
.context("getting askpass password")
.log_err()
{
#[cfg(target_os = "windows")]
{
askpass_secret.get_or_init(|| password.clone());
move |prompt| {
let prompt = delegate.ask_password(prompt);
let kill_tx = kill_tx.clone();
let askpass_opened_tx = askpass_opened_tx.clone();
#[cfg(target_os = "windows")]
let askpass_secret = askpass_secret.clone();
executor.spawn(async move {
if let Some(askpass_opened_tx) = askpass_opened_tx.lock().await.take() {
askpass_opened_tx.send(()).ok();
}
if let Ok(decrypted) = decrypt(password) {
stream.write_all(decrypted.as_bytes()).await.log_err();
if let Some(password) = prompt.await {
#[cfg(target_os = "windows")]
{
_ = askpass_secret.set(password.clone());
}
ControlFlow::Continue(Ok(password))
} else {
if let Some(kill_tx) = kill_tx.lock().await.take() {
kill_tx.send(()).log_err();
}
ControlFlow::Break(())
}
} else {
if let Some(kill_tx) = kill_tx.take() {
kill_tx.send(()).log_err();
}
// note: we expect the caller to drop this task when it's done.
// We need to keep the stream open until the caller is done to avoid
// spurious errors from ssh.
std::future::pending::<()>().await;
drop(stream);
}
})
}
drop(temp_dir)
});

// Create an askpass script that communicates back to this process.
let askpass_script = generate_askpass_script(&zed_path, &askpass_socket);
fs::write(&askpass_script_path, askpass_script)
.await
.with_context(|| format!("creating askpass script at {askpass_script_path:?}"))?;
make_file_executable(&askpass_script_path).await?;
#[cfg(target_os = "windows")]
let askpass_helper = format!(
"powershell.exe -ExecutionPolicy Bypass -File {}",
askpass_script_path.display()
);
};
let askpass_task = PasswordProxy::new(get_password, executor.clone()).await?;

Ok(Self {
#[cfg(not(target_os = "windows"))]
script_path: askpass_script_path,

#[cfg(target_os = "windows")]
secret,
#[cfg(target_os = "windows")]
askpass_helper,

_askpass_task: askpass_task,
askpass_task,
askpass_kill_master_rx: Some(askpass_kill_master_rx),
askpass_opened_rx: Some(askpass_opened_rx),
})
}

#[cfg(not(target_os = "windows"))]
pub fn script_path(&self) -> impl AsRef<OsStr> {
&self.script_path
}

#[cfg(target_os = "windows")]
pub fn script_path(&self) -> impl AsRef<OsStr> {
&self.askpass_helper
}

// This will run the askpass task forever, resolving as many authentication requests as needed.
// The caller is responsible for examining the result of their own commands and cancelling this
// future when this is no longer needed. Note that this can only be called once, but due to the
@@ -204,8 +171,109 @@ impl AskPassSession {
pub fn get_password(&self) -> Option<EncryptedPassword> {
self.secret.get().cloned()
}

pub fn script_path(&self) -> impl AsRef<OsStr> {
self.askpass_task.script_path()
}
}

pub struct PasswordProxy {
_task: Task<()>,
#[cfg(not(target_os = "windows"))]
askpass_script_path: std::path::PathBuf,
#[cfg(target_os = "windows")]
askpass_helper: String,
}

impl PasswordProxy {
pub async fn new(
mut get_password: impl FnMut(String) -> Task<ControlFlow<(), Result<EncryptedPassword>>>
+ 'static
+ Send
+ Sync,
executor: BackgroundExecutor,
) -> Result<Self> {
let temp_dir = tempfile::Builder::new().prefix("zed-askpass").tempdir()?;
let askpass_socket = temp_dir.path().join("askpass.sock");
let askpass_script_path = temp_dir.path().join(ASKPASS_SCRIPT_NAME);
let current_exec =
std::env::current_exe().context("Failed to determine current zed executable path.")?;

let askpass_program = ASKPASS_PROGRAM
.get_or_init(|| current_exec)
.try_shell_safe()
.context("Failed to shell-escape Askpass program path.")?
.to_string();
// Create an askpass script that communicates back to this process.
let askpass_script = generate_askpass_script(&askpass_program, &askpass_socket);
let _task = executor.spawn(async move {
maybe!(async move {
let listener =
UnixListener::bind(&askpass_socket).context("creating askpass socket")?;

while let Ok((mut stream, _)) = listener.accept().await {
let mut buffer = Vec::new();
let mut reader = BufReader::new(&mut stream);
if reader.read_until(b'\0', &mut buffer).await.is_err() {
buffer.clear();
}
let prompt = String::from_utf8_lossy(&buffer).into_owned();
let password = get_password(prompt).await;
match password {
ControlFlow::Continue(password) => {
if let Ok(password) = password
&& let Ok(decrypted) =
password.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)
{
stream.write_all(decrypted.as_bytes()).await.log_err();
}
}
ControlFlow::Break(()) => {
// note: we expect the caller to drop this task when it's done.
// We need to keep the stream open until the caller is done to avoid
// spurious errors from ssh.
std::future::pending::<()>().await;
drop(stream);
}
}
}
drop(temp_dir);
Result::<_, anyhow::Error>::Ok(())
})
.await
.log_err();
});

fs::write(&askpass_script_path, askpass_script)
.await
.with_context(|| format!("creating askpass script at {askpass_script_path:?}"))?;
make_file_executable(&askpass_script_path).await?;
#[cfg(target_os = "windows")]
let askpass_helper = format!(
"powershell.exe -ExecutionPolicy Bypass -File {}",
askpass_script_path.display()
);

Ok(Self {
_task,
#[cfg(not(target_os = "windows"))]
askpass_script_path,
#[cfg(target_os = "windows")]
askpass_helper,
})
}

pub fn script_path(&self) -> impl AsRef<OsStr> {
#[cfg(not(target_os = "windows"))]
{
&self.askpass_script_path
}
#[cfg(target_os = "windows")]
{
&self.askpass_helper
}
}
}
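A hedged sketch of wiring `PasswordProxy::new` with a fixed-answer closure, purely to show the `ControlFlow` contract; a real caller forwards the prompt to the UI instead of hard-coding a value, and the names here are illustrative.

async fn example_proxy(executor: BackgroundExecutor) -> anyhow::Result<()> {
    let inner = executor.clone();
    let proxy = PasswordProxy::new(
        move |_prompt: String| {
            inner.spawn(async {
                match EncryptedPassword::try_from("hunter2") {
                    // Continue: hand the password back to the waiting askpass script.
                    Ok(pw) => ControlFlow::Continue(Ok(pw)),
                    // Break: give up and let the caller tear the session down.
                    Err(_) => ControlFlow::Break(()),
                }
            })
        },
        executor,
    )
    .await?;
    // The returned script path is what gets handed to ssh/git as the askpass program.
    let _askpass_script = proxy.script_path();
    Ok(())
}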
/// The main function for when Zed is running in netcat mode for use in askpass.
/// Called from both the remote server binary and the zed binary in their respective main functions.
pub fn main(socket: &str) {
@@ -252,12 +320,17 @@ pub fn main(socket: &str) {
}
}

pub fn set_askpass_program(path: std::path::PathBuf) {
if ASKPASS_PROGRAM.set(path).is_err() {
debug_panic!("askpass program has already been set");
}
}

#[inline]
#[cfg(not(target_os = "windows"))]
fn generate_askpass_script(zed_path: &str, askpass_socket: &std::path::Path) -> String {
fn generate_askpass_script(askpass_program: &str, askpass_socket: &std::path::Path) -> String {
format!(
"{shebang}\n{print_args} | {zed_exe} --askpass={askpass_socket} 2> /dev/null \n",
zed_exe = zed_path,
"{shebang}\n{print_args} | {askpass_program} --askpass={askpass_socket} 2> /dev/null \n",
askpass_socket = askpass_socket.display(),
print_args = "printf '%s\\0' \"$@\"",
shebang = "#!/bin/sh",
@@ -266,13 +339,12 @@ fn generate_askpass_script(zed_path: &str, askpass_socket: &std::path::Path) ->

#[inline]
#[cfg(target_os = "windows")]
fn generate_askpass_script(zed_path: &std::path::Path, askpass_socket: &std::path::Path) -> String {
fn generate_askpass_script(askpass_program: &str, askpass_socket: &std::path::Path) -> String {
format!(
r#"
$ErrorActionPreference = 'Stop';
($args -join [char]0) | & "{zed_exe}" --askpass={askpass_socket} 2> $null
($args -join [char]0) | & "{askpass_program}" --askpass={askpass_socket} 2> $null
"#,
zed_exe = zed_path.display(),
askpass_socket = askpass_socket.display(),
)
}

@@ -21,27 +21,6 @@ type LengthWithoutPadding = u32;
#[derive(Clone)]
pub struct EncryptedPassword(Vec<u8>, LengthWithoutPadding);

pub trait ProcessExt {
fn encrypted_env(&mut self, name: &str, value: EncryptedPassword) -> &mut Self;
}

impl ProcessExt for smol::process::Command {
fn encrypted_env(&mut self, name: &str, value: EncryptedPassword) -> &mut Self {
if let Ok(password) = decrypt(value) {
self.env(name, password);
}
self
}
}

impl TryFrom<EncryptedPassword> for proto::AskPassResponse {
type Error = anyhow::Error;
fn try_from(pw: EncryptedPassword) -> Result<Self, Self::Error> {
let pw = decrypt(pw)?;
Ok(Self { response: pw })
}
}

impl Drop for EncryptedPassword {
fn drop(&mut self) {
self.0.zeroize();
@@ -67,7 +46,7 @@ impl TryFrom<&str> for EncryptedPassword {
unsafe {
CryptProtectMemory(
value.as_mut_ptr() as _,
len,
padded_length,
CRYPTPROTECTMEMORY_SAME_PROCESS,
)?;
}
@@ -79,38 +58,45 @@ impl TryFrom<&str> for EncryptedPassword {
}
}

pub(crate) fn decrypt(mut password: EncryptedPassword) -> Result<String> {
#[cfg(windows)]
{
use anyhow::Context;
use windows::Win32::Security::Cryptography::{
CRYPTPROTECTMEMORY_BLOCK_SIZE, CRYPTPROTECTMEMORY_SAME_PROCESS, CryptUnprotectMemory,
};
assert_eq!(
password.0.len() % CRYPTPROTECTMEMORY_BLOCK_SIZE as usize,
0,
"Violated pre-condition (buffer size <{}> must be a multiple of CRYPTPROTECTMEMORY_BLOCK_SIZE <{}>) for CryptUnprotectMemory.",
password.0.len(),
CRYPTPROTECTMEMORY_BLOCK_SIZE
);
if password.1 != 0 {
unsafe {
CryptUnprotectMemory(
password.0.as_mut_ptr() as _,
password.1,
CRYPTPROTECTMEMORY_SAME_PROCESS,
)
.context("while decrypting a SSH password")?
/// Read the docs for [EncryptedPassword]; please take care of not storing the plaintext string in memory for extended
/// periods of time.
pub struct IKnowWhatIAmDoingAndIHaveReadTheDocs;

impl EncryptedPassword {
pub fn decrypt(mut self, _: IKnowWhatIAmDoingAndIHaveReadTheDocs) -> Result<String> {
#[cfg(windows)]
{
use anyhow::Context;
use windows::Win32::Security::Cryptography::{
CRYPTPROTECTMEMORY_BLOCK_SIZE, CRYPTPROTECTMEMORY_SAME_PROCESS,
CryptUnprotectMemory,
};
assert_eq!(
self.0.len() % CRYPTPROTECTMEMORY_BLOCK_SIZE as usize,
0,
"Violated pre-condition (buffer size <{}> must be a multiple of CRYPTPROTECTMEMORY_BLOCK_SIZE <{}>) for CryptUnprotectMemory.",
self.0.len(),
CRYPTPROTECTMEMORY_BLOCK_SIZE
);
if self.1 != 0 {
unsafe {
CryptUnprotectMemory(
self.0.as_mut_ptr() as _,
self.0.len().try_into()?,
CRYPTPROTECTMEMORY_SAME_PROCESS,
)
.context("while decrypting a SSH password")?
};

{
// Remove padding
_ = password.0.drain(password.1 as usize..);
{
// Remove padding
_ = self.0.drain(self.1 as usize..);
}
}
}

Ok(String::from_utf8(std::mem::take(&mut password.0))?)
Ok(String::from_utf8(std::mem::take(&mut self.0))?)
}
#[cfg(not(windows))]
Ok(String::from_utf8(std::mem::take(&mut self.0))?)
}
#[cfg(not(windows))]
Ok(String::from_utf8(std::mem::take(&mut password.0))?)
}
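To make the new opt-in decryption API concrete, a small hypothetical round trip (test-style, not part of the crate) might look like this:

fn round_trip() -> anyhow::Result<()> {
    let secret = EncryptedPassword::try_from("hunter2")?;
    // Decrypting consumes the value and requires the explicit acknowledgement token.
    let plain = secret.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?;
    assert_eq!(plain, "hunter2");
    Ok(())
}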
@@ -6,7 +6,7 @@ use assistant_slash_command::{
use fuzzy::{PathMatch, StringMatchCandidate};
use gpui::{App, Entity, Task, WeakEntity};
use language::{
Anchor, BufferSnapshot, DiagnosticEntry, DiagnosticSeverity, LspAdapterDelegate,
Anchor, BufferSnapshot, DiagnosticEntryRef, DiagnosticSeverity, LspAdapterDelegate,
OffsetRangeExt, ToOffset,
};
use project::{DiagnosticSummary, PathMatchCandidateSet, Project};
@@ -367,7 +367,7 @@ pub fn collect_buffer_diagnostics(

fn collect_diagnostic(
output: &mut SlashCommandOutput,
entry: &DiagnosticEntry<Anchor>,
entry: &DiagnosticEntryRef<'_, Anchor>,
snapshot: &BufferSnapshot,
include_warnings: bool,
) {

@@ -17,7 +17,7 @@ use editor::{
use futures::StreamExt;
use gpui::{
Animation, AnimationExt, AnyWindowHandle, App, AppContext, AsyncApp, Entity, Task,
TextStyleRefinement, WeakEntity, pulsating_between, px,
TextStyleRefinement, WeakEntity, pulsating_between,
};
use indoc::formatdoc;
use language::{
@@ -1003,7 +1003,7 @@ impl ToolCard for EditFileToolCard {
font_size: Some(
TextSize::Small
.rems(cx)
.to_pixels(ThemeSettings::get_global(cx).agent_font_size(cx))
.to_pixels(ThemeSettings::get_global(cx).agent_ui_font_size(cx))
.into(),
),
..TextStyleRefinement::default()
@@ -1102,7 +1102,7 @@ impl ToolCard for EditFileToolCard {
.relative()
.h_full()
.when(!self.full_height_expanded, |editor_container| {
editor_container.max_h(px(COLLAPSED_LINES as f32 * editor_line_height.0))
editor_container.max_h(COLLAPSED_LINES as f32 * editor_line_height)
})
.overflow_hidden()
.border_t_1()
@@ -1161,7 +1161,7 @@ async fn build_buffer(
LineEnding::normalize(&mut text);
let text = Rope::from(text);
let language = cx
.update(|_cx| language_registry.language_for_file_path(&path))?
.update(|_cx| language_registry.load_language_for_file_path(&path))?
.await
.ok();
let buffer = cx.new(|cx| {

@@ -18,7 +18,7 @@ use portable_pty::{CommandBuilder, PtySize, native_pty_system};
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
use settings::{Settings, SettingsLocation};
use std::{
env,
path::{Path, PathBuf},
@@ -27,12 +27,13 @@ use std::{
time::{Duration, Instant},
};
use task::{Shell, ShellBuilder};
use terminal::terminal_settings::TerminalSettings;
use terminal_view::TerminalView;
use theme::ThemeSettings;
use ui::{CommonAnimationExt, Disclosure, Tooltip, prelude::*};
use util::{
ResultExt, get_default_system_shell, markdown::MarkdownInlineCode, size::format_file_size,
time::duration_alt_display,
ResultExt, get_default_system_shell_preferring_bash, markdown::MarkdownInlineCode,
size::format_file_size, time::duration_alt_display,
};
use workspace::Workspace;

@@ -119,17 +120,29 @@ impl Tool for TerminalTool {
};

let cwd = working_dir.clone();
let env = match &working_dir {
let env = match &cwd {
Some(dir) => project.update(cx, |project, cx| {
project.directory_environment(dir.as_path().into(), cx)
let worktree = project.find_worktree(dir.as_path(), cx);
let shell = TerminalSettings::get(
worktree.as_ref().map(|(worktree, path)| SettingsLocation {
worktree_id: worktree.read(cx).id(),
path: &path,
}),
cx,
)
.shell
.clone();
project.directory_environment(&shell, dir.as_path().into(), cx)
}),
None => Task::ready(None).shared(),
};
let remote_shell = project.update(cx, |project, cx| {
project
.remote_client()
.and_then(|r| r.read(cx).default_system_shell())
});
let shell = project
.update(cx, |project, cx| {
project
.remote_client()
.and_then(|r| r.read(cx).default_system_shell())
})
.unwrap_or_else(|| get_default_system_shell_preferring_bash());

let env = cx.spawn(async move |_| {
let mut env = env.await.unwrap_or_default();
@@ -142,12 +155,9 @@ impl Tool for TerminalTool {
let build_cmd = {
let input_command = input.command.clone();
move || {
ShellBuilder::new(
remote_shell.as_deref(),
&Shell::Program(get_default_system_shell()),
)
.redirect_stdin_to_dev_null()
.build(Some(input_command.clone()), &[])
ShellBuilder::new(&Shell::Program(shell))
.redirect_stdin_to_dev_null()
.build(Some(input_command), &[])
}
};

@@ -476,7 +486,7 @@ impl ToolCard for TerminalToolCard {
.as_ref()
.cloned()
.or_else(|| env::current_dir().ok())
.map(|path| format!("{}", path.display()))
.map(|path| path.display().to_string())
.unwrap_or_else(|| "current directory".to_string());

let header = h_flex()
@@ -694,7 +704,6 @@ mod tests {
use serde_json::json;
use settings::{Settings, SettingsStore};
use terminal::terminal_settings::TerminalSettings;
use theme::ThemeSettings;
use util::{ResultExt as _, test::TempTree};

use super::*;
@@ -709,7 +718,7 @@ mod tests {
language::init(cx);
Project::init_settings(cx);
workspace::init_settings(cx);
ThemeSettings::register(cx);
theme::init(theme::LoadThemes::JustBase, cx);
TerminalSettings::register(cx);
EditorSettings::register(cx);
});
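For reference, the new single-argument `ShellBuilder` call shape in isolation; `shell` is whatever string the settings/remote lookup above produced, and the command literal is only an example, not part of the change.

fn build_example_command(shell: String) {
    let command = ShellBuilder::new(&Shell::Program(shell))
        .redirect_stdin_to_dev_null()
        .build(Some("echo hello".to_string()), &[]);
    // `command` is then handed to the pty layer, as in the tool implementation above.
    let _ = command;
}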
@@ -42,7 +42,7 @@ pub struct AudioSettings {

/// Configuration of audio in Zed
impl Settings for AudioSettings {
fn from_settings(content: &settings::SettingsContent, _cx: &mut App) -> Self {
fn from_settings(content: &settings::SettingsContent) -> Self {
let audio = &content.audio.as_ref().unwrap();
AudioSettings {
rodio_audio: audio.rodio_audio.unwrap(),

@@ -127,7 +127,7 @@ struct AutoUpdateSetting(bool);
///
/// Default: true
impl Settings for AutoUpdateSetting {
fn from_settings(content: &settings::SettingsContent, _cx: &mut App) -> Self {
fn from_settings(content: &settings::SettingsContent) -> Self {
Self(content.auto_update.unwrap())
}
}
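These hunks drop the `_cx: &mut App` parameter from `Settings::from_settings`. A hypothetical implementation under the new signature (the `MySetting` type is illustrative; the field read is borrowed from the auto-update example above):

#[derive(Debug)]
struct MySetting(bool);

impl settings::Settings for MySetting {
    fn from_settings(content: &settings::SettingsContent) -> Self {
        // No App context is available any more; values are read straight from the content tree.
        Self(content.auto_update.unwrap())
    }
}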
@@ -1,5 +1,4 @@
use std::{
os::windows::process::CommandExt,
path::Path,
time::{Duration, Instant},
};
@@ -7,7 +6,6 @@ use std::{
use anyhow::{Context as _, Result};
use windows::Win32::{
Foundation::{HWND, LPARAM, WPARAM},
System::Threading::CREATE_NEW_PROCESS_GROUP,
UI::WindowsAndMessaging::PostMessageW,
};

@@ -38,6 +36,20 @@ pub(crate) const JOBS: &[Job] = &[
std::fs::remove_file(&zed_wsl)
.context(format!("Failed to remove old file {}", zed_wsl.display()))
},
|app_dir| {
let open_console = app_dir.join("OpenConsole.exe");
log::info!("Removing old file: {}", open_console.display());
std::fs::remove_file(&open_console).context(format!(
"Failed to remove old file {}",
open_console.display()
))
},
|app_dir| {
let conpty = app_dir.join("conpty.dll");
log::info!("Removing old file: {}", conpty.display());
std::fs::remove_file(&conpty)
.context(format!("Failed to remove old file {}", conpty.display()))
},
// Copy new files
|app_dir| {
let zed_executable_source = app_dir.join("install\\Zed.exe");
@@ -87,6 +99,38 @@ pub(crate) const JOBS: &[Job] = &[
zed_wsl_dest.display()
))
},
|app_dir| {
let open_console_source = app_dir.join("install\\OpenConsole.exe");
let open_console_dest = app_dir.join("OpenConsole.exe");
log::info!(
"Copying new file {} to {}",
open_console_source.display(),
open_console_dest.display()
);
std::fs::copy(&open_console_source, &open_console_dest)
.map(|_| ())
.context(format!(
"Failed to copy new file {} to {}",
open_console_source.display(),
open_console_dest.display()
))
},
|app_dir| {
let conpty_source = app_dir.join("install\\conpty.dll");
let conpty_dest = app_dir.join("conpty.dll");
log::info!(
"Copying new file {} to {}",
conpty_source.display(),
conpty_dest.display()
);
std::fs::copy(&conpty_source, &conpty_dest)
.map(|_| ())
.context(format!(
"Failed to copy new file {} to {}",
conpty_source.display(),
conpty_dest.display()
))
},
// Clean up installer folder and updates folder
|app_dir| {
let updates_folder = app_dir.join("updates");
@@ -161,9 +205,7 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>, launch: bool)
}
if launch {
#[allow(clippy::disallowed_methods, reason = "doesn't run in the main binary")]
let _ = std::process::Command::new(app_dir.join("Zed.exe"))
.creation_flags(CREATE_NEW_PROCESS_GROUP.0)
.spawn();
let _ = std::process::Command::new(app_dir.join("Zed.exe")).spawn();
}
log::info!("Update completed successfully");
Ok(())

@@ -1,4 +1,3 @@
use gpui::App;
use settings::Settings;

#[derive(Debug)]
@@ -8,17 +7,11 @@ pub struct CallSettings {
}

impl Settings for CallSettings {
fn from_settings(content: &settings::SettingsContent, _cx: &mut App) -> Self {
fn from_settings(content: &settings::SettingsContent) -> Self {
let call = content.calls.clone().unwrap();
CallSettings {
mute_on_join: call.mute_on_join.unwrap(),
share_on_join: call.share_on_join.unwrap(),
}
}

fn import_from_vscode(
_vscode: &settings::VsCodeSettings,
_current: &mut settings::SettingsContent,
) {
}
}

@@ -22,6 +22,7 @@ default = []

[dependencies]
anyhow.workspace = true
askpass.workspace = true
clap.workspace = true
collections.workspace = true
ipc-channel = "0.19"

@@ -116,6 +116,11 @@ struct Args {
))]
#[arg(long)]
uninstall: bool,

/// Used for SSH/Git password authentication, to remove the need for netcat as a dependency,
/// by having Zed act like netcat communicating over a Unix socket.
#[arg(long, hide = true)]
askpass: Option<String>,
}

fn parse_path_with_position(argument_str: &str) -> anyhow::Result<String> {
@@ -203,6 +208,12 @@ fn main() -> Result<()> {
}
let args = Args::parse();

// `zed --askpass` makes zed operate in nc/netcat mode for use with askpass
if let Some(socket) = &args.askpass {
askpass::main(socket);
return Ok(());
}

// Set custom data directory before any path operations
let user_data_dir = args.user_data_dir.clone();
if let Some(dir) = &user_data_dir {
@@ -720,15 +731,15 @@ mod windows {
Storage::FileSystem::{
CreateFileW, FILE_FLAGS_AND_ATTRIBUTES, FILE_SHARE_MODE, OPEN_EXISTING, WriteFile,
},
System::Threading::{CREATE_NEW_PROCESS_GROUP, CreateMutexW},
System::Threading::CreateMutexW,
},
core::HSTRING,
};

use crate::{Detect, InstalledApp};
use std::io;
use std::path::{Path, PathBuf};
use std::process::ExitStatus;
use std::{io, os::windows::process::CommandExt};

fn check_single_instance() -> bool {
let mutex = unsafe {
@@ -767,7 +778,6 @@ mod windows {
fn launch(&self, ipc_url: String) -> anyhow::Result<()> {
if check_single_instance() {
std::process::Command::new(self.0.clone())
.creation_flags(CREATE_NEW_PROCESS_GROUP.0)
.arg(ipc_url)
.spawn()?;
} else {
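Relatedly, a hypothetical caller-side sketch of pointing the askpass program at the CLI binary via `set_askpass_program` from the askpass crate shown earlier; the error handling and function name are illustrative only.

fn configure_askpass_program() -> anyhow::Result<()> {
    // Per the ASKPASS_PROGRAM docs above, Windows points this at the CLI variant of Zed
    // rather than the main binary; here we simply use the current executable.
    let cli_path = std::env::current_exe()?;
    askpass::set_askpass_program(cli_path);
    Ok(())
}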
@@ -101,7 +101,7 @@ pub struct ClientSettings {
|
||||
}
|
||||
|
||||
impl Settings for ClientSettings {
|
||||
fn from_settings(content: &settings::SettingsContent, _cx: &mut App) -> Self {
|
||||
fn from_settings(content: &settings::SettingsContent) -> Self {
|
||||
if let Some(server_url) = &*ZED_SERVER_URL {
|
||||
return Self {
|
||||
server_url: server_url.clone(),
|
||||
@@ -133,7 +133,7 @@ impl ProxySettings {
|
||||
}
|
||||
|
||||
impl Settings for ProxySettings {
|
||||
fn from_settings(content: &settings::SettingsContent, _cx: &mut App) -> Self {
|
||||
fn from_settings(content: &settings::SettingsContent) -> Self {
|
||||
Self {
|
||||
proxy: content.proxy.clone(),
|
||||
}
|
||||
@@ -519,7 +519,7 @@ pub struct TelemetrySettings {
|
||||
}
|
||||
|
||||
impl settings::Settings for TelemetrySettings {
|
||||
fn from_settings(content: &SettingsContent, _cx: &mut App) -> Self {
|
||||
fn from_settings(content: &SettingsContent) -> Self {
|
||||
Self {
|
||||
diagnostics: content.telemetry.as_ref().unwrap().diagnostics.unwrap(),
|
||||
metrics: content.telemetry.as_ref().unwrap().metrics.unwrap(),
|
||||
|
||||
@@ -17,5 +17,6 @@ cloud_llm_client.workspace = true
|
||||
indoc.workspace = true
|
||||
ordered-float.workspace = true
|
||||
rustc-hash.workspace = true
|
||||
serde.workspace = true
|
||||
strum.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
@@ -5,6 +5,7 @@ use cloud_llm_client::predict_edits_v3::{self, Event, PromptFormat, ReferencedDe
|
||||
use indoc::indoc;
|
||||
use ordered_float::OrderedFloat;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use serde::Serialize;
|
||||
use std::fmt::Write;
|
||||
use std::sync::Arc;
|
||||
use std::{cmp::Reverse, collections::BinaryHeap, ops::Range, path::Path};
|
||||
@@ -75,7 +76,7 @@ pub enum DeclarationStyle {
|
||||
Declaration,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct SectionLabels {
|
||||
pub excerpt_index: usize,
|
||||
pub section_ranges: Vec<(Arc<Path>, Range<usize>)>,
|
||||
|
||||
@@ -20,7 +20,5 @@ LLM_DATABASE_MAX_CONNECTIONS = 5
|
||||
LLM_API_SECRET = "llm-secret"
|
||||
OPENAI_API_KEY = "llm-secret"
|
||||
|
||||
# SLACK_PANICS_WEBHOOK = ""
|
||||
|
||||
# RUST_LOG=info
|
||||
# LOG_JSON=true
|
||||
|
||||
@@ -46,7 +46,6 @@ rand.workspace = true
|
||||
reqwest = { version = "0.11", features = ["json"] }
|
||||
reqwest_client.workspace = true
|
||||
rpc.workspace = true
|
||||
rustc-demangle.workspace = true
|
||||
scrypt = "0.11"
|
||||
sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
|
||||
semantic_version.workspace = true
|
||||
|
||||
@@ -214,11 +214,6 @@ spec:
|
||||
secretKeyRef:
|
||||
name: blob-store
|
||||
key: bucket
|
||||
- name: SLACK_PANICS_WEBHOOK
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: slack
|
||||
key: panics_webhook
|
||||
- name: COMPLETE_WITH_LANGUAGE_MODEL_RATE_LIMIT_PER_HOUR
|
||||
value: "1000"
|
||||
- name: SUPERMAVEN_ADMIN_API_KEY
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
alter table billing_subscriptions
|
||||
add column token_spend_in_cents integer,
|
||||
add column token_spend_in_cents_updated_at timestamp without time zone;
|
||||
@@ -1,8 +1,6 @@
|
||||
pub mod contributors;
|
||||
pub mod events;
|
||||
pub mod extensions;
|
||||
pub mod ips_file;
|
||||
pub mod slack;
|
||||
|
||||
use crate::{AppState, Error, Result, auth, db::UserId, rpc};
|
||||
use anyhow::Context as _;
|
||||
|
||||
@@ -1,33 +1,28 @@
|
||||
use super::ips_file::IpsFile;
|
||||
use crate::api::CloudflareIpCountryHeader;
|
||||
use crate::{AppState, Error, Result, api::slack};
|
||||
use crate::{AppState, Error, Result};
|
||||
use anyhow::anyhow;
|
||||
use aws_sdk_s3::primitives::ByteStream;
|
||||
use axum::{
|
||||
Extension, Router, TypedHeader,
|
||||
body::Bytes,
|
||||
headers::Header,
|
||||
http::{HeaderMap, HeaderName, StatusCode},
|
||||
http::{HeaderName, StatusCode},
|
||||
routing::post,
|
||||
};
|
||||
use chrono::Duration;
|
||||
use semantic_version::SemanticVersion;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::sync::{Arc, OnceLock};
|
||||
use telemetry_events::{Event, EventRequestBody, Panic};
|
||||
use telemetry_events::{Event, EventRequestBody};
|
||||
use util::ResultExt;
|
||||
use uuid::Uuid;
|
||||
|
||||
const CRASH_REPORTS_BUCKET: &str = "zed-crash-reports";
|
||||
|
||||
pub fn router() -> Router {
|
||||
Router::new()
|
||||
.route("/telemetry/events", post(post_events))
|
||||
.route("/telemetry/crashes", post(post_crash))
|
||||
.route("/telemetry/crashes", post(post_panic))
|
||||
.route("/telemetry/panics", post(post_panic))
|
||||
.route("/telemetry/hangs", post(post_hang))
|
||||
.route("/telemetry/hangs", post(post_panic))
|
||||
}
|
||||
|
||||
pub struct ZedChecksumHeader(Vec<u8>);
|
||||
@@ -58,437 +53,12 @@ impl Header for ZedChecksumHeader {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn post_crash(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
headers: HeaderMap,
|
||||
body: Bytes,
|
||||
) -> Result<()> {
|
||||
let report = IpsFile::parse(&body)?;
|
||||
let version_threshold = SemanticVersion::new(0, 123, 0);
|
||||
|
||||
let bundle_id = &report.header.bundle_id;
|
||||
let app_version = &report.app_version();
|
||||
|
||||
if bundle_id == "dev.zed.Zed-Dev" {
|
||||
log::error!("Crash uploads from {} are ignored.", bundle_id);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if app_version.is_none() || app_version.unwrap() < version_threshold {
|
||||
log::error!(
|
||||
"Crash uploads from {} are ignored.",
|
||||
report.header.app_version
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
let app_version = app_version.unwrap();
|
||||
|
||||
if let Some(blob_store_client) = app.blob_store_client.as_ref() {
|
||||
let response = blob_store_client
|
||||
.head_object()
|
||||
.bucket(CRASH_REPORTS_BUCKET)
|
||||
.key(report.header.incident_id.clone() + ".ips")
|
||||
.send()
|
||||
.await;
|
||||
|
||||
if response.is_ok() {
|
||||
log::info!("We've already uploaded this crash");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
blob_store_client
|
||||
.put_object()
|
||||
.bucket(CRASH_REPORTS_BUCKET)
|
||||
.key(report.header.incident_id.clone() + ".ips")
|
||||
.acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead)
|
||||
.body(ByteStream::from(body.to_vec()))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| log::error!("Failed to upload crash: {}", e))
|
||||
.ok();
|
||||
}
|
||||
|
||||
let recent_panic_on: Option<i64> = headers
|
||||
.get("x-zed-panicked-on")
|
||||
.and_then(|h| h.to_str().ok())
|
||||
.and_then(|s| s.parse().ok());
|
||||
|
||||
let installation_id = headers
|
||||
.get("x-zed-installation-id")
|
||||
.and_then(|h| h.to_str().ok())
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_default();
|
||||
|
||||
let mut recent_panic = None;
|
||||
|
||||
if let Some(recent_panic_on) = recent_panic_on {
|
||||
let crashed_at = match report.timestamp() {
|
||||
Ok(t) => Some(t),
|
||||
Err(e) => {
|
||||
log::error!("Can't parse {}: {}", report.header.timestamp, e);
|
||||
None
|
||||
}
|
||||
};
|
||||
if crashed_at.is_some_and(|t| (t.timestamp_millis() - recent_panic_on).abs() <= 30000) {
|
||||
recent_panic = headers.get("x-zed-panic").and_then(|h| h.to_str().ok());
|
||||
}
|
||||
}
|
||||
|
||||
let description = report.description(recent_panic);
|
||||
let summary = report.backtrace_summary();
|
||||
|
||||
tracing::error!(
|
||||
service = "client",
|
||||
version = %report.header.app_version,
|
||||
os_version = %report.header.os_version,
|
||||
bundle_id = %report.header.bundle_id,
|
||||
incident_id = %report.header.incident_id,
|
||||
installation_id = %installation_id,
|
||||
description = %description,
|
||||
backtrace = %summary,
|
||||
"crash report"
|
||||
);
|
||||
|
||||
if let Some(kinesis_client) = app.kinesis_client.clone()
|
||||
&& let Some(stream) = app.config.kinesis_stream.clone()
|
||||
{
|
||||
let properties = json!({
|
||||
"app_version": report.header.app_version,
|
||||
"os_version": report.header.os_version,
|
||||
"os_name": "macOS",
|
||||
"bundle_id": report.header.bundle_id,
|
||||
"incident_id": report.header.incident_id,
|
||||
"installation_id": installation_id,
|
||||
"description": description,
|
||||
"backtrace": summary,
|
||||
});
|
||||
let row = SnowflakeRow::new(
|
||||
"Crash Reported",
|
||||
None,
|
||||
false,
|
||||
Some(installation_id),
|
||||
properties,
|
||||
);
|
||||
let data = serde_json::to_vec(&row)?;
|
||||
kinesis_client
|
||||
.put_record()
|
||||
.stream_name(stream)
|
||||
.partition_key(row.insert_id.unwrap_or_default())
|
||||
.data(data.into())
|
||||
.send()
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
|
||||
if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() {
|
||||
let payload = slack::WebhookBody::new(|w| {
|
||||
w.add_section(|s| s.text(slack::Text::markdown(description)))
|
||||
.add_section(|s| {
|
||||
s.add_field(slack::Text::markdown(format!(
|
||||
"*Version:*\n{} ({})",
|
||||
bundle_id, app_version
|
||||
)))
|
||||
.add_field({
|
||||
let hostname = app.config.blob_store_url.clone().unwrap_or_default();
|
||||
let hostname = hostname.strip_prefix("https://").unwrap_or_else(|| {
|
||||
hostname.strip_prefix("http://").unwrap_or_default()
|
||||
});
|
||||
|
||||
slack::Text::markdown(format!(
|
||||
"*Incident:*\n<https://{}.{}/{}.ips|{}…>",
|
||||
CRASH_REPORTS_BUCKET,
|
||||
hostname,
|
||||
report.header.incident_id,
|
||||
report
|
||||
.header
|
||||
.incident_id
|
||||
.chars()
|
||||
.take(8)
|
||||
.collect::<String>(),
|
||||
))
|
||||
})
|
||||
})
|
||||
.add_rich_text(|r| r.add_preformatted(|p| p.add_text(summary)))
|
||||
});
|
||||
let payload_json = serde_json::to_string(&payload).map_err(|err| {
|
||||
log::error!("Failed to serialize payload to JSON: {err}");
|
||||
Error::Internal(anyhow!(err))
|
||||
})?;
|
||||
|
||||
reqwest::Client::new()
|
||||
.post(slack_panics_webhook)
|
||||
.header("Content-Type", "application/json")
|
||||
.body(payload_json)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|err| {
|
||||
log::error!("Failed to send payload to Slack: {err}");
|
||||
Error::Internal(anyhow!(err))
|
||||
})?;
|
||||
}
|
||||
|
||||
pub async fn post_panic() -> Result<()> {
|
||||
// As of v0.201.x, crash/panic reporting is done via Sentry.
// The endpoint returns OK to avoid spurious errors for old clients.
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn post_hang(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
TypedHeader(ZedChecksumHeader(checksum)): TypedHeader<ZedChecksumHeader>,
|
||||
body: Bytes,
|
||||
) -> Result<()> {
|
||||
let Some(expected) = calculate_json_checksum(app.clone(), &body) else {
|
||||
return Err(Error::http(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"events not enabled".into(),
|
||||
))?;
|
||||
};
|
||||
|
||||
if checksum != expected {
|
||||
return Err(Error::http(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"invalid checksum".into(),
|
||||
))?;
|
||||
}
|
||||
|
||||
let incident_id = Uuid::new_v4().to_string();
|
||||
|
||||
// dump JSON into S3 so we can get frame offsets if we need to.
|
||||
if let Some(blob_store_client) = app.blob_store_client.as_ref() {
|
||||
blob_store_client
|
||||
.put_object()
|
||||
.bucket(CRASH_REPORTS_BUCKET)
|
||||
.key(incident_id.clone() + ".hang.json")
|
||||
.acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead)
|
||||
.body(ByteStream::from(body.to_vec()))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| log::error!("Failed to upload crash: {}", e))
|
||||
.ok();
|
||||
}
|
||||
|
||||
let report: telemetry_events::HangReport = serde_json::from_slice(&body).map_err(|err| {
|
||||
log::error!("can't parse report json: {err}");
|
||||
Error::Internal(anyhow!(err))
|
||||
})?;
|
||||
|
||||
let mut backtrace = "Possible hang detected on main thread:".to_string();
|
||||
let unknown = "<unknown>".to_string();
|
||||
for frame in report.backtrace.iter() {
|
||||
backtrace.push_str(&format!("\n{}", frame.symbols.first().unwrap_or(&unknown)));
|
||||
}
|
||||
|
||||
tracing::error!(
|
||||
service = "client",
|
||||
version = %report.app_version.unwrap_or_default().to_string(),
|
||||
os_name = %report.os_name,
|
||||
os_version = report.os_version.unwrap_or_default(),
|
||||
incident_id = %incident_id,
|
||||
installation_id = %report.installation_id.unwrap_or_default(),
|
||||
backtrace = %backtrace,
|
||||
"hang report");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn post_panic(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
TypedHeader(ZedChecksumHeader(checksum)): TypedHeader<ZedChecksumHeader>,
|
||||
body: Bytes,
|
||||
) -> Result<()> {
|
||||
let Some(expected) = calculate_json_checksum(app.clone(), &body) else {
|
||||
return Err(Error::http(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"events not enabled".into(),
|
||||
))?;
|
||||
};
|
||||
|
||||
if checksum != expected {
|
||||
return Err(Error::http(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"invalid checksum".into(),
|
||||
))?;
|
||||
}
|
||||
|
||||
let report: telemetry_events::PanicRequest = serde_json::from_slice(&body)
|
||||
.map_err(|_| Error::http(StatusCode::BAD_REQUEST, "invalid json".into()))?;
|
||||
let incident_id = uuid::Uuid::new_v4().to_string();
|
||||
let panic = report.panic;
|
||||
|
||||
if panic.os_name == "Linux" && panic.os_version == Some("1.0.0".to_string()) {
|
||||
return Err(Error::http(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"invalid os version".into(),
|
||||
))?;
|
||||
}
|
||||
|
||||
if let Some(blob_store_client) = app.blob_store_client.as_ref() {
|
||||
let response = blob_store_client
|
||||
.head_object()
|
||||
.bucket(CRASH_REPORTS_BUCKET)
|
||||
.key(incident_id.clone() + ".json")
|
||||
.send()
|
||||
.await;
|
||||
|
||||
if response.is_ok() {
|
||||
log::info!("We've already uploaded this crash");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
blob_store_client
|
||||
.put_object()
|
||||
.bucket(CRASH_REPORTS_BUCKET)
|
||||
.key(incident_id.clone() + ".json")
|
||||
.acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead)
|
||||
.body(ByteStream::from(body.to_vec()))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| log::error!("Failed to upload crash: {}", e))
|
||||
.ok();
|
||||
}
|
||||
|
||||
let backtrace = panic.backtrace.join("\n");
|
||||
|
||||
tracing::error!(
|
||||
service = "client",
|
||||
version = %panic.app_version,
|
||||
os_name = %panic.os_name,
|
||||
os_version = %panic.os_version.clone().unwrap_or_default(),
|
||||
incident_id = %incident_id,
|
||||
installation_id = %panic.installation_id.clone().unwrap_or_default(),
|
||||
description = %panic.payload,
|
||||
backtrace = %backtrace,
|
||||
"panic report"
|
||||
);
|
||||
|
||||
if let Some(kinesis_client) = app.kinesis_client.clone()
|
||||
&& let Some(stream) = app.config.kinesis_stream.clone()
|
||||
{
|
||||
let properties = json!({
|
||||
"app_version": panic.app_version,
|
||||
"os_name": panic.os_name,
|
||||
"os_version": panic.os_version,
|
||||
"incident_id": incident_id,
|
||||
"installation_id": panic.installation_id,
|
||||
"description": panic.payload,
|
||||
"backtrace": backtrace,
|
||||
});
|
||||
let row = SnowflakeRow::new(
|
||||
"Panic Reported",
|
||||
None,
|
||||
false,
|
||||
panic.installation_id.clone(),
|
||||
properties,
|
||||
);
|
||||
let data = serde_json::to_vec(&row)?;
|
||||
kinesis_client
|
||||
.put_record()
|
||||
.stream_name(stream)
|
||||
.partition_key(row.insert_id.unwrap_or_default())
|
||||
.data(data.into())
|
||||
.send()
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
|
||||
if !report_to_slack(&panic) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() {
|
||||
let backtrace = if panic.backtrace.len() > 25 {
|
||||
let total = panic.backtrace.len();
|
||||
format!(
|
||||
"{}\n and {} more",
|
||||
panic
|
||||
.backtrace
|
||||
.iter()
|
||||
.take(20)
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n"),
|
||||
total - 20
|
||||
)
|
||||
} else {
|
||||
panic.backtrace.join("\n")
|
||||
};
|
||||
let backtrace_with_summary = panic.payload + "\n" + &backtrace;
|
||||
|
||||
let version = if panic.release_channel == "nightly"
|
||||
&& !panic.app_version.contains("remote-server")
|
||||
&& let Some(sha) = panic.app_commit_sha
|
||||
{
|
||||
format!("Zed Nightly {}", sha.chars().take(7).collect::<String>())
|
||||
} else {
|
||||
panic.app_version
|
||||
};
|
||||
|
||||
let payload = slack::WebhookBody::new(|w| {
|
||||
w.add_section(|s| s.text(slack::Text::markdown("Panic request".to_string())))
|
||||
.add_section(|s| {
|
||||
s.add_field(slack::Text::markdown(format!("*Version:*\n {version} ",)))
|
||||
.add_field({
|
||||
let hostname = app.config.blob_store_url.clone().unwrap_or_default();
|
||||
let hostname = hostname.strip_prefix("https://").unwrap_or_else(|| {
|
||||
hostname.strip_prefix("http://").unwrap_or_default()
|
||||
});
|
||||
|
||||
slack::Text::markdown(format!(
|
||||
"*{} {}:*\n<https://{}.{}/{}.json|{}…>",
|
||||
panic.os_name,
|
||||
panic.os_version.unwrap_or_default(),
|
||||
CRASH_REPORTS_BUCKET,
|
||||
hostname,
|
||||
incident_id,
|
||||
incident_id.chars().take(8).collect::<String>(),
|
||||
))
|
||||
})
|
||||
})
|
||||
.add_rich_text(|r| r.add_preformatted(|p| p.add_text(backtrace_with_summary)))
|
||||
});
|
||||
let payload_json = serde_json::to_string(&payload).map_err(|err| {
|
||||
log::error!("Failed to serialize payload to JSON: {err}");
|
||||
Error::Internal(anyhow!(err))
|
||||
})?;
|
||||
|
||||
reqwest::Client::new()
|
||||
.post(slack_panics_webhook)
|
||||
.header("Content-Type", "application/json")
|
||||
.body(payload_json)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|err| {
|
||||
log::error!("Failed to send payload to Slack: {err}");
|
||||
Error::Internal(anyhow!(err))
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn report_to_slack(panic: &Panic) -> bool {
|
||||
// Panics on macOS should make their way to Slack as a crash report,
|
||||
// so we don't need to send them a second time via this channel.
|
||||
if panic.os_name == "macOS" {
|
||||
return false;
|
||||
}
|
||||
|
||||
if panic.payload.contains("ERROR_SURFACE_LOST_KHR") {
|
||||
return false;
|
||||
}
|
||||
|
||||
if panic.payload.contains("ERROR_INITIALIZATION_FAILED") {
|
||||
return false;
|
||||
}
|
||||
|
||||
if panic
|
||||
.payload
|
||||
.contains("GPU has crashed, and no debug information is available")
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
pub async fn post_events(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
TypedHeader(ZedChecksumHeader(checksum)): TypedHeader<ZedChecksumHeader>,
|
||||
|
||||
@@ -1,346 +0,0 @@
|
||||
use anyhow::Context as _;
|
||||
use collections::HashMap;
|
||||
|
||||
use semantic_version::SemanticVersion;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct IpsFile {
|
||||
pub header: Header,
|
||||
pub body: Body,
|
||||
}
|
||||
|
||||
impl IpsFile {
|
||||
pub fn parse(bytes: &[u8]) -> anyhow::Result<IpsFile> {
|
||||
let mut split = bytes.splitn(2, |&b| b == b'\n');
|
||||
let header_bytes = split.next().context("No header found")?;
|
||||
let header: Header = serde_json::from_slice(header_bytes).context("parsing header")?;
|
||||
|
||||
let body_bytes = split.next().context("No body found")?;
|
||||
|
||||
let body: Body = serde_json::from_slice(body_bytes).context("parsing body")?;
|
||||
Ok(IpsFile { header, body })
|
||||
}
|
||||
|
||||
pub fn faulting_thread(&self) -> Option<&Thread> {
|
||||
self.body.threads.get(self.body.faulting_thread? as usize)
|
||||
}
|
||||
|
||||
pub fn app_version(&self) -> Option<SemanticVersion> {
|
||||
self.header.app_version.parse().ok()
|
||||
}
|
||||
|
||||
pub fn timestamp(&self) -> anyhow::Result<chrono::DateTime<chrono::FixedOffset>> {
|
||||
chrono::DateTime::parse_from_str(&self.header.timestamp, "%Y-%m-%d %H:%M:%S%.f %#z")
|
||||
.map_err(|e| anyhow::anyhow!(e))
|
||||
}
|
||||
|
||||
pub fn description(&self, panic: Option<&str>) -> String {
|
||||
let mut desc = if self.body.termination.indicator == "Abort trap: 6" {
|
||||
match panic {
|
||||
Some(panic_message) => format!("Panic `{}`", panic_message),
|
||||
None => "Crash `Abort trap: 6` (possible panic)".into(),
|
||||
}
|
||||
} else if let Some(msg) = &self.body.exception.message {
|
||||
format!("Exception `{}`", msg)
|
||||
} else {
|
||||
format!("Crash `{}`", self.body.termination.indicator)
|
||||
};
|
||||
if let Some(thread) = self.faulting_thread() {
|
||||
if let Some(queue) = thread.queue.as_ref() {
|
||||
desc += &format!(
|
||||
" on thread {} ({})",
|
||||
self.body.faulting_thread.unwrap_or_default(),
|
||||
queue
|
||||
);
|
||||
} else {
|
||||
desc += &format!(
|
||||
" on thread {} ({})",
|
||||
self.body.faulting_thread.unwrap_or_default(),
|
||||
thread.name.clone().unwrap_or_default()
|
||||
);
|
||||
}
|
||||
}
|
||||
desc
|
||||
}
|
||||
|
||||
pub fn backtrace_summary(&self) -> String {
|
||||
if let Some(thread) = self.faulting_thread() {
|
||||
let mut frames = thread
|
||||
.frames
|
||||
.iter()
|
||||
.filter_map(|frame| {
|
||||
if let Some(name) = &frame.symbol {
|
||||
if self.is_ignorable_frame(name) {
|
||||
return None;
|
||||
}
|
||||
Some(format!("{:#}", rustc_demangle::demangle(name)))
|
||||
} else if let Some(image) = self.body.used_images.get(frame.image_index) {
|
||||
Some(image.name.clone().unwrap_or("<unknown-image>".into()))
|
||||
} else {
|
||||
Some("<unknown>".into())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let total = frames.len();
|
||||
if total > 21 {
|
||||
frames = frames.into_iter().take(20).collect();
|
||||
frames.push(format!(" and {} more...", total - 20))
|
||||
}
|
||||
frames.join("\n")
|
||||
} else {
|
||||
"<no backtrace available>".into()
|
||||
}
|
||||
}
|
||||
|
||||
fn is_ignorable_frame(&self, symbol: &String) -> bool {
|
||||
[
|
||||
"pthread_kill",
|
||||
"panic",
|
||||
"backtrace",
|
||||
"rust_begin_unwind",
|
||||
"abort",
|
||||
]
|
||||
.iter()
|
||||
.any(|s| symbol.contains(s))
|
||||
}
|
||||
}
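For reference, the parser removed here leans on the .ips layout: a single-line JSON header followed by a JSON body, which is why `parse` splits on the first newline. A minimal sketch of exercising it with synthetic data; every field value below is made up, and real reports carry many more fields:

// Sketch only; assumes IpsFile from this module is in scope.
let raw = concat!(
    r#"{"app_name":"Zed","app_version":"0.123.0","bundleID":"dev.zed.Zed","incident_id":"ABCD-1234","timestamp":"2024-01-01 00:00:00.000 +0000"}"#,
    "\n",
    r#"{"faultingThread":0,"threads":[],"usedImages":[],"termination":{"indicator":"Abort trap: 6"}}"#
);
let report = IpsFile::parse(raw.as_bytes()).expect("synthetic report should parse");
assert_eq!(report.header.bundle_id, "dev.zed.Zed");
assert!(report.app_version().is_some());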
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(default)]
|
||||
pub struct Header {
|
||||
pub app_name: String,
|
||||
pub timestamp: String,
|
||||
pub app_version: String,
|
||||
pub slice_uuid: String,
|
||||
pub build_version: String,
|
||||
pub platform: i64,
|
||||
#[serde(rename = "bundleID", default)]
|
||||
pub bundle_id: String,
|
||||
pub share_with_app_devs: i64,
|
||||
pub is_first_party: i64,
|
||||
pub bug_type: String,
|
||||
pub os_version: String,
|
||||
pub roots_installed: i64,
|
||||
pub name: String,
|
||||
pub incident_id: String,
|
||||
}
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Body {
|
||||
pub uptime: i64,
|
||||
pub proc_role: String,
|
||||
pub version: i64,
|
||||
#[serde(rename = "userID")]
|
||||
pub user_id: i64,
|
||||
pub deploy_version: i64,
|
||||
pub model_code: String,
|
||||
#[serde(rename = "coalitionID")]
|
||||
pub coalition_id: i64,
|
||||
pub os_version: OsVersion,
|
||||
pub capture_time: String,
|
||||
pub code_signing_monitor: i64,
|
||||
pub incident: String,
|
||||
pub pid: i64,
|
||||
pub translated: bool,
|
||||
pub cpu_type: String,
|
||||
#[serde(rename = "roots_installed")]
|
||||
pub roots_installed: i64,
|
||||
#[serde(rename = "bug_type")]
|
||||
pub bug_type: String,
|
||||
pub proc_launch: String,
|
||||
pub proc_start_abs_time: i64,
|
||||
pub proc_exit_abs_time: i64,
|
||||
pub proc_name: String,
|
||||
pub proc_path: String,
|
||||
pub bundle_info: BundleInfo,
|
||||
pub store_info: StoreInfo,
|
||||
pub parent_proc: String,
|
||||
pub parent_pid: i64,
|
||||
pub coalition_name: String,
|
||||
pub crash_reporter_key: String,
|
||||
#[serde(rename = "codeSigningID")]
|
||||
pub code_signing_id: String,
|
||||
#[serde(rename = "codeSigningTeamID")]
|
||||
pub code_signing_team_id: String,
|
||||
pub code_signing_flags: i64,
|
||||
pub code_signing_validation_category: i64,
|
||||
pub code_signing_trust_level: i64,
|
||||
pub instruction_byte_stream: InstructionByteStream,
|
||||
pub sip: String,
|
||||
pub exception: Exception,
|
||||
pub termination: Termination,
|
||||
pub asi: Asi,
|
||||
pub ext_mods: ExtMods,
|
||||
pub faulting_thread: Option<i64>,
|
||||
pub threads: Vec<Thread>,
|
||||
pub used_images: Vec<UsedImage>,
|
||||
pub shared_cache: SharedCache,
|
||||
pub vm_summary: String,
|
||||
pub legacy_info: LegacyInfo,
|
||||
pub log_writing_signature: String,
|
||||
pub trial_info: TrialInfo,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct OsVersion {
|
||||
pub train: String,
|
||||
pub build: String,
|
||||
pub release_type: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct BundleInfo {
|
||||
#[serde(rename = "CFBundleShortVersionString")]
|
||||
pub cfbundle_short_version_string: String,
|
||||
#[serde(rename = "CFBundleVersion")]
|
||||
pub cfbundle_version: String,
|
||||
#[serde(rename = "CFBundleIdentifier")]
|
||||
pub cfbundle_identifier: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct StoreInfo {
|
||||
pub device_identifier_for_vendor: String,
|
||||
pub third_party: bool,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct InstructionByteStream {
|
||||
#[serde(rename = "beforePC")]
|
||||
pub before_pc: String,
|
||||
#[serde(rename = "atPC")]
|
||||
pub at_pc: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Exception {
|
||||
pub codes: String,
|
||||
pub raw_codes: Vec<i64>,
|
||||
#[serde(rename = "type")]
|
||||
pub type_field: String,
|
||||
pub subtype: Option<String>,
|
||||
pub signal: String,
|
||||
pub port: Option<i64>,
|
||||
pub guard_id: Option<i64>,
|
||||
pub message: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Termination {
|
||||
pub flags: i64,
|
||||
pub code: i64,
|
||||
pub namespace: String,
|
||||
pub indicator: String,
|
||||
pub by_proc: String,
|
||||
pub by_pid: i64,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Asi {
|
||||
#[serde(rename = "libsystem_c.dylib")]
|
||||
pub libsystem_c_dylib: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct ExtMods {
|
||||
pub caller: ExtMod,
|
||||
pub system: ExtMod,
|
||||
pub targeted: ExtMod,
|
||||
pub warnings: i64,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct ExtMod {
|
||||
#[serde(rename = "thread_create")]
|
||||
pub thread_create: i64,
|
||||
#[serde(rename = "thread_set_state")]
|
||||
pub thread_set_state: i64,
|
||||
#[serde(rename = "task_for_pid")]
|
||||
pub task_for_pid: i64,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Thread {
|
||||
pub thread_state: HashMap<String, Value>,
|
||||
pub id: i64,
|
||||
pub triggered: Option<bool>,
|
||||
pub name: Option<String>,
|
||||
pub queue: Option<String>,
|
||||
pub frames: Vec<Frame>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Frame {
|
||||
pub image_offset: i64,
|
||||
pub symbol: Option<String>,
|
||||
pub symbol_location: Option<i64>,
|
||||
pub image_index: usize,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct UsedImage {
|
||||
pub source: String,
|
||||
pub arch: Option<String>,
|
||||
pub base: i64,
|
||||
#[serde(rename = "CFBundleShortVersionString")]
|
||||
pub cfbundle_short_version_string: Option<String>,
|
||||
#[serde(rename = "CFBundleIdentifier")]
|
||||
pub cfbundle_identifier: Option<String>,
|
||||
pub size: i64,
|
||||
pub uuid: String,
|
||||
pub path: Option<String>,
|
||||
pub name: Option<String>,
|
||||
#[serde(rename = "CFBundleVersion")]
|
||||
pub cfbundle_version: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct SharedCache {
|
||||
pub base: i64,
|
||||
pub size: i64,
|
||||
pub uuid: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct LegacyInfo {
|
||||
pub thread_triggered: ThreadTriggered,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct ThreadTriggered {
|
||||
pub name: String,
|
||||
pub queue: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct TrialInfo {
|
||||
pub rollouts: Vec<Rollout>,
|
||||
pub experiments: Vec<Value>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase", default)]
|
||||
pub struct Rollout {
|
||||
pub rollout_id: String,
|
||||
pub factor_pack_ids: HashMap<String, Value>,
|
||||
pub deployment_id: i64,
|
||||
}
|
||||
@@ -1,144 +0,0 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// https://api.slack.com/reference/messaging/payload
|
||||
#[derive(Default, Clone, Serialize, Deserialize)]
|
||||
pub struct WebhookBody {
|
||||
text: String,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
blocks: Vec<Block>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
thread_ts: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
mrkdwn: Option<bool>,
|
||||
}
|
||||
|
||||
impl WebhookBody {
|
||||
pub fn new(f: impl FnOnce(Self) -> Self) -> Self {
|
||||
f(Self::default())
|
||||
}
|
||||
|
||||
pub fn add_section(mut self, build: impl FnOnce(Section) -> Section) -> Self {
|
||||
self.blocks.push(Block::Section(build(Section::default())));
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_rich_text(mut self, build: impl FnOnce(RichText) -> RichText) -> Self {
|
||||
self.blocks
|
||||
.push(Block::RichText(build(RichText::default())));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
/// https://api.slack.com/reference/block-kit/blocks
|
||||
pub enum Block {
|
||||
#[serde(rename = "section")]
|
||||
Section(Section),
|
||||
#[serde(rename = "rich_text")]
|
||||
RichText(RichText),
|
||||
// .... etc.
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/blocks#section
|
||||
#[derive(Default, Clone, Serialize, Deserialize)]
|
||||
pub struct Section {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
text: Option<Text>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
fields: Vec<Text>,
|
||||
// fields, accessories...
|
||||
}
|
||||
|
||||
impl Section {
|
||||
pub fn text(mut self, text: Text) -> Self {
|
||||
self.text = Some(text);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_field(mut self, field: Text) -> Self {
|
||||
self.fields.push(field);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/composition-objects#text
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum Text {
|
||||
#[serde(rename = "plain_text")]
|
||||
PlainText { text: String, emoji: bool },
|
||||
#[serde(rename = "mrkdwn")]
|
||||
Markdown { text: String, verbatim: bool },
|
||||
}
|
||||
|
||||
impl Text {
|
||||
pub fn plain(s: String) -> Self {
|
||||
Self::PlainText {
|
||||
text: s,
|
||||
emoji: true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn markdown(s: String) -> Self {
|
||||
Self::Markdown {
|
||||
text: s,
|
||||
verbatim: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Serialize, Deserialize)]
|
||||
pub struct RichText {
|
||||
elements: Vec<RichTextObject>,
|
||||
}
|
||||
|
||||
impl RichText {
|
||||
pub fn new(f: impl FnOnce(Self) -> Self) -> Self {
|
||||
f(Self::default())
|
||||
}
|
||||
|
||||
pub fn add_preformatted(
|
||||
mut self,
|
||||
build: impl FnOnce(RichTextPreformatted) -> RichTextPreformatted,
|
||||
) -> Self {
|
||||
self.elements.push(RichTextObject::Preformatted(build(
|
||||
RichTextPreformatted::default(),
|
||||
)));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/blocks#rich_text
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum RichTextObject {
|
||||
#[serde(rename = "rich_text_preformatted")]
|
||||
Preformatted(RichTextPreformatted),
|
||||
// etc.
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/blocks#rich_text_preformatted
|
||||
#[derive(Clone, Default, Serialize, Deserialize)]
|
||||
pub struct RichTextPreformatted {
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
elements: Vec<RichTextElement>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
border: Option<u8>,
|
||||
}
|
||||
|
||||
impl RichTextPreformatted {
|
||||
pub fn add_text(mut self, text: String) -> Self {
|
||||
self.elements.push(RichTextElement::Text { text });
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// https://api.slack.com/reference/block-kit/blocks#element-types
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum RichTextElement {
|
||||
#[serde(rename = "text")]
|
||||
Text { text: String },
|
||||
// etc.
|
||||
}
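The builder types in this (removed) module compose a Slack webhook payload the same way the crash and panic handlers above do. A minimal usage sketch, with all strings invented for illustration:

// Sketch only; assumes the module is in scope as `slack`.
let payload = slack::WebhookBody::new(|w| {
    w.add_section(|s| s.text(slack::Text::markdown("*Example:* something went wrong".to_string())))
        .add_section(|s| {
            s.add_field(slack::Text::markdown("*Version:*\n0.0.0 (example)".to_string()))
                .add_field(slack::Text::plain("macOS 15.0 (example)".to_string()))
        })
        .add_rich_text(|r| r.add_preformatted(|p| p.add_text("frame 0\nframe 1".to_string())))
});
let payload_json = serde_json::to_string(&payload).unwrap();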
|
||||
@@ -153,7 +153,6 @@ pub struct Config {
|
||||
pub prediction_api_key: Option<Arc<str>>,
|
||||
pub prediction_model: Option<Arc<str>>,
|
||||
pub zed_client_checksum_seed: Option<String>,
|
||||
pub slack_panics_webhook: Option<String>,
|
||||
pub auto_join_channel_id: Option<ChannelId>,
|
||||
pub supermaven_admin_api_key: Option<Arc<str>>,
|
||||
}
|
||||
@@ -204,7 +203,6 @@ impl Config {
|
||||
prediction_api_key: None,
|
||||
prediction_model: None,
|
||||
zed_client_checksum_seed: None,
|
||||
slack_panics_webhook: None,
|
||||
auto_join_channel_id: None,
|
||||
migrations_path: None,
|
||||
seed_path: None,
|
||||
|
||||
@@ -323,8 +323,8 @@ fn assert_remote_selections(
|
||||
let CollaboratorId::PeerId(peer_id) = s.collaborator_id else {
|
||||
panic!("unexpected collaborator id");
|
||||
};
|
||||
let start = s.selection.start.to_offset(&snapshot.buffer_snapshot);
|
||||
let end = s.selection.end.to_offset(&snapshot.buffer_snapshot);
|
||||
let start = s.selection.start.to_offset(snapshot.buffer_snapshot());
|
||||
let end = s.selection.end.to_offset(snapshot.buffer_snapshot());
|
||||
let user_id = collaborators.get(&peer_id).unwrap().user_id;
|
||||
let participant_index = hub.user_participant_indices(cx).get(&user_id).copied();
|
||||
(participant_index, start..end)
|
||||
|
||||
@@ -2041,6 +2041,10 @@ async fn test_mutual_editor_inlay_hint_cache_update(
|
||||
});
|
||||
}
|
||||
|
||||
// This test started hanging on seed 2 after the theme settings PR.
// The hypothesis is that it's been buggy for a while, but got lucky on seeds.
|
||||
#[ignore]
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_inlay_hint_refresh_is_forwarded(
|
||||
cx_a: &mut TestAppContext,
|
||||
|
||||
@@ -84,7 +84,11 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
|
||||
diff.update(cx_b, |diff, cx| {
|
||||
assert_eq!(
|
||||
diff.excerpt_paths(cx),
|
||||
vec!["changed.txt", "deleted.txt", "created.txt"]
|
||||
vec![
|
||||
rel_path("changed.txt").into_arc(),
|
||||
rel_path("deleted.txt").into_arc(),
|
||||
rel_path("created.txt").into_arc()
|
||||
]
|
||||
);
|
||||
});
|
||||
|
||||
@@ -121,7 +125,11 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
|
||||
diff.update(cx_b, |diff, cx| {
|
||||
assert_eq!(
|
||||
diff.excerpt_paths(cx),
|
||||
vec!["deleted.txt", "unchanged.txt", "created.txt"]
|
||||
vec![
|
||||
rel_path("deleted.txt").into_arc(),
|
||||
rel_path("unchanged.txt").into_arc(),
|
||||
rel_path("created.txt").into_arc()
|
||||
]
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -6514,14 +6514,8 @@ async fn test_right_click_menu_behind_collab_panel(cx: &mut TestAppContext) {
|
||||
cx.simulate_keystrokes("cmd-n cmd-n cmd-n");
|
||||
cx.update(|window, _cx| window.refresh());
|
||||
|
||||
let tab_bounds = cx.debug_bounds("TAB-2").unwrap();
|
||||
let new_tab_button_bounds = cx.debug_bounds("ICON-Plus").unwrap();
|
||||
|
||||
assert!(
|
||||
tab_bounds.intersects(&new_tab_button_bounds),
|
||||
"Tab should overlap with the new tab button, if this is failing check if there's been a redesign!"
|
||||
);
|
||||
|
||||
cx.simulate_event(MouseDownEvent {
|
||||
button: MouseButton::Right,
|
||||
position: new_tab_button_bounds.center(),
|
||||
|
||||
@@ -183,9 +183,10 @@ pub async fn run_randomized_test<T: RandomizedTest>(
|
||||
|
||||
for (client, cx) in clients {
|
||||
cx.update(|cx| {
|
||||
let store = cx.remove_global::<SettingsStore>();
|
||||
let settings = cx.remove_global::<SettingsStore>();
|
||||
cx.clear_globals();
|
||||
cx.set_global(store);
|
||||
cx.set_global(settings);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
drop(client);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -172,6 +172,7 @@ impl TestServer {
|
||||
}
|
||||
let settings = SettingsStore::test(cx);
|
||||
cx.set_global(settings);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
client::init_settings(cx);
|
||||
});
|
||||
@@ -599,7 +600,6 @@ impl TestServer {
|
||||
prediction_api_key: None,
|
||||
prediction_model: None,
|
||||
zed_client_checksum_seed: None,
|
||||
slack_panics_webhook: None,
|
||||
auto_join_channel_id: None,
|
||||
migrations_path: None,
|
||||
seed_path: None,
|
||||
|
||||
@@ -248,7 +248,7 @@ impl ChannelView {
|
||||
.editor
|
||||
.update(cx, |editor, cx| editor.snapshot(window, cx));
|
||||
|
||||
if let Some(outline) = snapshot.buffer_snapshot.outline(None)
|
||||
if let Some(outline) = snapshot.buffer_snapshot().outline(None)
|
||||
&& let Some(item) = outline
|
||||
.items
|
||||
.iter()
|
||||
@@ -305,7 +305,7 @@ impl ChannelView {
|
||||
|
||||
let mut closest_heading = None;
|
||||
|
||||
if let Some(outline) = snapshot.buffer_snapshot.outline(None) {
|
||||
if let Some(outline) = snapshot.buffer_snapshot().outline(None) {
|
||||
for item in outline.items {
|
||||
if item.range.start.to_display_point(&snapshot) > position {
|
||||
break;
|
||||
@@ -508,10 +508,6 @@ impl Item for ChannelView {
|
||||
}))
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _cx: &App) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn navigate(
|
||||
&mut self,
|
||||
data: Box<dyn Any>,
|
||||
|
||||
@@ -922,7 +922,7 @@ impl CollabPanel {
|
||||
|
||||
ListItem::new(user.github_login.clone())
|
||||
.start_slot(Avatar::new(user.avatar_uri.clone()))
|
||||
.child(Label::new(user.github_login.clone()))
|
||||
.child(render_participant_name_and_handle(user))
|
||||
.toggle_state(is_selected)
|
||||
.end_slot(if is_pending {
|
||||
Label::new("Calling").color(Color::Muted).into_any_element()
|
||||
@@ -2505,7 +2505,7 @@ impl CollabPanel {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(Label::new(github_login.clone()))
|
||||
.child(render_participant_name_and_handle(&contact.user))
|
||||
.when(calling, |el| {
|
||||
el.child(Label::new("Calling").color(Color::Muted))
|
||||
})
|
||||
@@ -2940,6 +2940,14 @@ fn render_tree_branch(
|
||||
.h(line_height)
|
||||
}
|
||||
|
||||
fn render_participant_name_and_handle(user: &User) -> impl IntoElement {
|
||||
Label::new(if let Some(ref display_name) = user.name {
|
||||
format!("{display_name} ({})", user.github_login)
|
||||
} else {
|
||||
user.github_login.to_string()
|
||||
})
|
||||
}
|
||||
|
||||
impl Render for CollabPanel {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
v_flex()
|
||||
@@ -3170,8 +3178,8 @@ struct JoinChannelTooltip {
|
||||
}
|
||||
|
||||
impl Render for JoinChannelTooltip {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
tooltip_container(window, cx, |container, _, cx| {
|
||||
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
tooltip_container(cx, |container, cx| {
|
||||
let participants = self
|
||||
.channel_store
|
||||
.read(cx)
|
||||
@@ -3183,7 +3191,7 @@ impl Render for JoinChannelTooltip {
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(Avatar::new(participant.avatar_uri.clone()))
|
||||
.child(Label::new(participant.github_login.clone()))
|
||||
.child(render_participant_name_and_handle(participant))
|
||||
}))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -18,7 +18,7 @@ pub struct NotificationPanelSettings {
|
||||
}
|
||||
|
||||
impl Settings for CollaborationPanelSettings {
|
||||
fn from_settings(content: &settings::SettingsContent, _cx: &mut ui::App) -> Self {
|
||||
fn from_settings(content: &settings::SettingsContent) -> Self {
|
||||
let panel = content.collaboration_panel.as_ref().unwrap();
|
||||
|
||||
Self {
|
||||
@@ -30,7 +30,7 @@ impl Settings for CollaborationPanelSettings {
|
||||
}
|
||||
|
||||
impl Settings for NotificationPanelSettings {
|
||||
fn from_settings(content: &settings::SettingsContent, _cx: &mut ui::App) -> Self {
|
||||
fn from_settings(content: &settings::SettingsContent) -> Self {
|
||||
let panel = content.notification_panel.as_ref().unwrap();
|
||||
return Self {
|
||||
button: panel.button.unwrap(),
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
[package]
|
||||
name = "collections"
|
||||
name = "zed-collections"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
publish = true
|
||||
license = "Apache-2.0"
|
||||
description = "Standard collection type re-exports used by Zed and GPUI"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use dap_types::SteppingGranularity;
|
||||
use gpui::App;
|
||||
use settings::{Settings, SettingsContent};
|
||||
|
||||
pub struct DebuggerSettings {
|
||||
@@ -34,7 +33,7 @@ pub struct DebuggerSettings {
|
||||
}
|
||||
|
||||
impl Settings for DebuggerSettings {
|
||||
fn from_settings(content: &SettingsContent, _cx: &mut App) -> Self {
|
||||
fn from_settings(content: &SettingsContent) -> Self {
|
||||
let content = content.debugger.clone().unwrap();
|
||||
Self {
|
||||
stepping_granularity: dap_granularity_from_settings(
|
||||
|
||||
@@ -41,8 +41,8 @@ use serde_json::Value;
|
||||
use settings::Settings;
|
||||
use stack_frame_list::StackFrameList;
|
||||
use task::{
|
||||
BuildTaskDefinition, DebugScenario, ShellBuilder, SpawnInTerminal, TaskContext, ZedDebugConfig,
|
||||
substitute_variables_in_str,
|
||||
BuildTaskDefinition, DebugScenario, Shell, ShellBuilder, SpawnInTerminal, TaskContext,
|
||||
ZedDebugConfig, substitute_variables_in_str,
|
||||
};
|
||||
use terminal_view::TerminalView;
|
||||
use ui::{
|
||||
@@ -988,7 +988,7 @@ impl RunningState {
|
||||
(task, None)
|
||||
}
|
||||
};
|
||||
let Some(task) = task_template.resolve_task("debug-build-task", &task_context) else {
|
||||
let Some(mut task) = task_template.resolve_task("debug-build-task", &task_context) else {
|
||||
anyhow::bail!("Could not resolve task variables within a debug scenario");
|
||||
};
|
||||
|
||||
@@ -1025,7 +1025,11 @@ impl RunningState {
|
||||
None
|
||||
};
|
||||
|
||||
let builder = ShellBuilder::new(remote_shell.as_deref(), &task.resolved.shell);
|
||||
if let Some(remote_shell) = remote_shell && task.resolved.shell == Shell::System {
|
||||
task.resolved.shell = Shell::Program(remote_shell);
|
||||
}
|
||||
|
||||
let builder = ShellBuilder::new(&task.resolved.shell);
|
||||
let command_label = builder.command_label(task.resolved.command.as_deref().unwrap_or(""));
|
||||
let (command, args) =
|
||||
builder.build(task.resolved.command.clone(), &task.resolved.args);
|
||||
@@ -1228,7 +1232,6 @@ impl RunningState {
|
||||
|
||||
terminal.read_with(cx, |terminal, _| {
|
||||
terminal
|
||||
.pty_info
|
||||
.pid()
|
||||
.map(|pid| pid.as_u32())
|
||||
.context("Terminal was spawned but PID was not available")
|
||||
|
||||
@@ -9,7 +9,10 @@ use gpui::{
|
||||
Action, AnyElement, Entity, EventEmitter, FocusHandle, Focusable, FontWeight, ListState,
|
||||
Subscription, Task, WeakEntity, list,
|
||||
};
|
||||
use util::debug_panic;
|
||||
use util::{
|
||||
debug_panic,
|
||||
paths::{PathStyle, is_absolute},
|
||||
};
|
||||
|
||||
use crate::{StackTraceView, ToggleUserFrames};
|
||||
use language::PointUtf16;
|
||||
@@ -470,8 +473,12 @@ impl StackFrameList {
|
||||
stack_frame.source.as_ref().and_then(|s| {
|
||||
s.path
|
||||
.as_deref()
|
||||
.filter(|path| {
|
||||
// Since we do not know whether we are debugging on the host or on a (remote/WSL) target,
// we check whether the path is absolute as either a POSIX or a Windows path.
|
||||
is_absolute(path, PathStyle::Posix) || is_absolute(path, PathStyle::Windows)
|
||||
})
|
||||
.map(|path| Arc::<Path>::from(Path::new(path)))
|
||||
.filter(|path| path.is_absolute())
|
||||
})
|
||||
}
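To illustrate the dual check above (a sketch, assuming util::paths::is_absolute only inspects path syntax for the given PathStyle; the paths are invented):

use util::paths::{PathStyle, is_absolute};

// A Windows-style path reported by a remote target fails the POSIX check but passes the Windows one.
assert!(!is_absolute("C:\\repo\\src\\main.rs", PathStyle::Posix));
assert!(is_absolute("C:\\repo\\src\\main.rs", PathStyle::Windows));
// A POSIX-style path passes the POSIX check.
assert!(is_absolute("/home/user/repo/src/main.rs", PathStyle::Posix));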
|
||||
|
||||
|
||||
@@ -1213,7 +1213,7 @@ impl VariableList {
|
||||
|
||||
let weak = cx.weak_entity();
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
let watcher_len = (self.list_handle.content_size().width.0 / 12.0).floor() - 3.0;
|
||||
let watcher_len = (f32::from(self.list_handle.content_size().width / 12.0).floor()) - 3.0;
|
||||
let watcher_len = watcher_len as usize;
|
||||
|
||||
div()
|
||||
|
||||
@@ -59,7 +59,7 @@ impl StackTraceView {
|
||||
|
||||
editor
|
||||
.snapshot(window, cx)
|
||||
.buffer_snapshot
|
||||
.buffer_snapshot()
|
||||
.excerpt_containing(position..position)
|
||||
.map(|excerpt| excerpt.id())
|
||||
});
|
||||
@@ -259,7 +259,7 @@ impl StackTraceView {
|
||||
let mut is_first = true;
|
||||
|
||||
for (_, highlight) in self.highlights.iter().skip(active_idx) {
|
||||
let position = highlight.to_point(&snapshot.buffer_snapshot);
|
||||
let position = highlight.to_point(&snapshot.buffer_snapshot());
|
||||
let color = if is_first {
|
||||
is_first = false;
|
||||
first_color
|
||||
@@ -268,11 +268,11 @@ impl StackTraceView {
|
||||
};
|
||||
|
||||
let start = snapshot
|
||||
.buffer_snapshot
|
||||
.buffer_snapshot()
|
||||
.clip_point(Point::new(position.row, 0), Bias::Left);
|
||||
let end = start + Point::new(1, 0);
|
||||
let start = snapshot.buffer_snapshot.anchor_before(start);
|
||||
let end = snapshot.buffer_snapshot.anchor_before(end);
|
||||
let start = snapshot.buffer_snapshot().anchor_before(start);
|
||||
let end = snapshot.buffer_snapshot().anchor_before(end);
|
||||
editor.highlight_rows::<DebugStackFrameLine>(
|
||||
start..end,
|
||||
color,
|
||||
@@ -354,10 +354,6 @@ impl Item for StackTraceView {
|
||||
self.editor.for_each_project_item(cx, f)
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _: &App) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn set_nav_history(
|
||||
&mut self,
|
||||
nav_history: ItemNavHistory,
|
||||
|
||||
@@ -1604,7 +1604,7 @@ async fn test_active_debug_line_setting(executor: BackgroundExecutor, cx: &mut T
|
||||
|
||||
let point = editor
|
||||
.snapshot(window, cx)
|
||||
.buffer_snapshot
|
||||
.buffer_snapshot()
|
||||
.summary_for_anchor::<language::Point>(&active_debug_lines.first().unwrap().0.start);
|
||||
|
||||
assert_eq!(point.row, 1);
|
||||
@@ -1679,7 +1679,7 @@ async fn test_active_debug_line_setting(executor: BackgroundExecutor, cx: &mut T
|
||||
|
||||
let point = editor
|
||||
.snapshot(window, cx)
|
||||
.buffer_snapshot
|
||||
.buffer_snapshot()
|
||||
.summary_for_anchor::<language::Point>(&active_debug_lines.first().unwrap().0.start);
|
||||
|
||||
assert_eq!(point.row, 2);
|
||||
|
||||
@@ -341,8 +341,8 @@ async fn test_select_stack_frame(executor: BackgroundExecutor, cx: &mut TestAppC
|
||||
editor
|
||||
.highlighted_rows::<editor::ActiveDebugLine>()
|
||||
.map(|(range, _)| {
|
||||
let start = range.start.to_point(&snapshot.buffer_snapshot);
|
||||
let end = range.end.to_point(&snapshot.buffer_snapshot);
|
||||
let start = range.start.to_point(&snapshot.buffer_snapshot());
|
||||
let end = range.end.to_point(&snapshot.buffer_snapshot());
|
||||
start.row..end.row
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
@@ -404,8 +404,8 @@ async fn test_select_stack_frame(executor: BackgroundExecutor, cx: &mut TestAppC
|
||||
editor
|
||||
.highlighted_rows::<editor::ActiveDebugLine>()
|
||||
.map(|(range, _)| {
|
||||
let start = range.start.to_point(&snapshot.buffer_snapshot);
|
||||
let end = range.end.to_point(&snapshot.buffer_snapshot);
|
||||
let start = range.start.to_point(&snapshot.buffer_snapshot());
|
||||
let end = range.end.to_point(&snapshot.buffer_snapshot());
|
||||
start.row..end.row
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
|
||||
@@ -15,7 +15,7 @@ use gpui::{
|
||||
InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, Subscription,
|
||||
Task, WeakEntity, Window, actions, div,
|
||||
};
|
||||
use language::{Buffer, DiagnosticEntry, Point};
|
||||
use language::{Buffer, DiagnosticEntry, DiagnosticEntryRef, Point};
|
||||
use project::{
|
||||
DiagnosticSummary, Event, Project, ProjectItem, ProjectPath,
|
||||
project_settings::{DiagnosticSeverity, ProjectSettings},
|
||||
@@ -350,7 +350,7 @@ impl BufferDiagnosticsEditor {
|
||||
grouped
|
||||
.entry(entry.diagnostic.group_id)
|
||||
.or_default()
|
||||
.push(DiagnosticEntry {
|
||||
.push(DiagnosticEntryRef {
|
||||
range: entry.range.to_point(&buffer_snapshot),
|
||||
diagnostic: entry.diagnostic,
|
||||
})
|
||||
@@ -560,13 +560,16 @@ impl BufferDiagnosticsEditor {
|
||||
})
|
||||
}
|
||||
|
||||
fn set_diagnostics(&mut self, diagnostics: &Vec<DiagnosticEntry<Anchor>>) {
|
||||
self.diagnostics = diagnostics.clone();
|
||||
fn set_diagnostics(&mut self, diagnostics: &[DiagnosticEntryRef<'_, Anchor>]) {
|
||||
self.diagnostics = diagnostics
|
||||
.iter()
|
||||
.map(DiagnosticEntryRef::to_owned)
|
||||
.collect();
|
||||
}
|
||||
|
||||
fn diagnostics_are_unchanged(
|
||||
&self,
|
||||
diagnostics: &Vec<DiagnosticEntry<Anchor>>,
|
||||
diagnostics: &Vec<DiagnosticEntryRef<'_, Anchor>>,
|
||||
snapshot: &BufferSnapshot,
|
||||
) -> bool {
|
||||
if self.diagnostics.len() != diagnostics.len() {
|
||||
@@ -727,10 +730,6 @@ impl Item for BufferDiagnosticsEditor {
|
||||
self.multibuffer.read(cx).is_dirty(cx)
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _cx: &App) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn navigate(
|
||||
&mut self,
|
||||
data: Box<dyn Any>,
|
||||
|
||||
@@ -6,7 +6,7 @@ use editor::{
|
||||
hover_popover::diagnostics_markdown_style,
|
||||
};
|
||||
use gpui::{AppContext, Entity, Focusable, WeakEntity};
|
||||
use language::{BufferId, Diagnostic, DiagnosticEntry};
|
||||
use language::{BufferId, Diagnostic, DiagnosticEntryRef};
|
||||
use lsp::DiagnosticSeverity;
|
||||
use markdown::{Markdown, MarkdownElement};
|
||||
use settings::Settings;
|
||||
@@ -24,7 +24,7 @@ pub struct DiagnosticRenderer;
|
||||
|
||||
impl DiagnosticRenderer {
|
||||
pub fn diagnostic_blocks_for_group(
|
||||
diagnostic_group: Vec<DiagnosticEntry<Point>>,
|
||||
diagnostic_group: Vec<DiagnosticEntryRef<'_, Point>>,
|
||||
buffer_id: BufferId,
|
||||
diagnostics_editor: Option<Arc<dyn DiagnosticsToolbarEditor>>,
|
||||
cx: &mut App,
|
||||
@@ -35,7 +35,7 @@ impl DiagnosticRenderer {
|
||||
else {
|
||||
return Vec::new();
|
||||
};
|
||||
let primary = diagnostic_group[primary_ix].clone();
|
||||
let primary = &diagnostic_group[primary_ix];
|
||||
let group_id = primary.diagnostic.group_id;
|
||||
let mut results = vec![];
|
||||
for entry in diagnostic_group.iter() {
|
||||
@@ -123,7 +123,7 @@ impl DiagnosticRenderer {
|
||||
impl editor::DiagnosticRenderer for DiagnosticRenderer {
|
||||
fn render_group(
|
||||
&self,
|
||||
diagnostic_group: Vec<DiagnosticEntry<Point>>,
|
||||
diagnostic_group: Vec<DiagnosticEntryRef<'_, Point>>,
|
||||
buffer_id: BufferId,
|
||||
snapshot: EditorSnapshot,
|
||||
editor: WeakEntity<Editor>,
|
||||
@@ -138,7 +138,7 @@ impl editor::DiagnosticRenderer for DiagnosticRenderer {
|
||||
BlockProperties {
|
||||
placement: BlockPlacement::Near(
|
||||
snapshot
|
||||
.buffer_snapshot
|
||||
.buffer_snapshot()
|
||||
.anchor_after(block.initial_range.start),
|
||||
),
|
||||
height: Some(1),
|
||||
@@ -152,19 +152,15 @@ impl editor::DiagnosticRenderer for DiagnosticRenderer {
|
||||
|
||||
fn render_hover(
|
||||
&self,
|
||||
diagnostic_group: Vec<DiagnosticEntry<Point>>,
|
||||
diagnostic_group: Vec<DiagnosticEntryRef<'_, Point>>,
|
||||
range: Range<Point>,
|
||||
buffer_id: BufferId,
|
||||
cx: &mut App,
|
||||
) -> Option<Entity<Markdown>> {
|
||||
let blocks = Self::diagnostic_blocks_for_group(diagnostic_group, buffer_id, None, cx);
|
||||
blocks.into_iter().find_map(|block| {
|
||||
if block.initial_range == range {
|
||||
Some(block.markdown)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
blocks
|
||||
.into_iter()
|
||||
.find_map(|block| (block.initial_range == range).then(|| block.markdown))
|
||||
}
|
||||
|
||||
fn open_link(
|
||||
@@ -189,7 +185,7 @@ pub(crate) struct DiagnosticBlock {
|
||||
impl DiagnosticBlock {
|
||||
pub fn render_block(&self, editor: WeakEntity<Editor>, bcx: &BlockContext) -> AnyElement {
|
||||
let cx = &bcx.app;
|
||||
let status_colors = bcx.app.theme().status();
|
||||
let status_colors = cx.theme().status();
|
||||
|
||||
let max_width = bcx.em_width * 120.;
|
||||
|
||||
@@ -282,7 +278,7 @@ impl DiagnosticBlock {
|
||||
}
|
||||
} else if let Some(diagnostic) = editor
|
||||
.snapshot(window, cx)
|
||||
.buffer_snapshot
|
||||
.buffer_snapshot()
|
||||
.diagnostic_group(buffer_id, group_id)
|
||||
.nth(ix)
|
||||
{
|
||||
|
||||
@@ -22,7 +22,8 @@ use gpui::{
|
||||
Subscription, Task, WeakEntity, Window, actions, div,
|
||||
};
|
||||
use language::{
|
||||
Bias, Buffer, BufferRow, BufferSnapshot, DiagnosticEntry, Point, ToTreeSitterPoint,
|
||||
Bias, Buffer, BufferRow, BufferSnapshot, DiagnosticEntry, DiagnosticEntryRef, Point,
|
||||
ToTreeSitterPoint,
|
||||
};
|
||||
use project::{
|
||||
DiagnosticSummary, Project, ProjectPath,
|
||||
@@ -412,8 +413,8 @@ impl ProjectDiagnosticsEditor {
|
||||
|
||||
fn diagnostics_are_unchanged(
|
||||
&self,
|
||||
existing: &Vec<DiagnosticEntry<text::Anchor>>,
|
||||
new: &Vec<DiagnosticEntry<text::Anchor>>,
|
||||
existing: &[DiagnosticEntry<text::Anchor>],
|
||||
new: &[DiagnosticEntryRef<'_, text::Anchor>],
|
||||
snapshot: &BufferSnapshot,
|
||||
) -> bool {
|
||||
if existing.len() != new.len() {
|
||||
@@ -457,7 +458,13 @@ impl ProjectDiagnosticsEditor {
|
||||
}) {
|
||||
return true;
|
||||
}
|
||||
this.diagnostics.insert(buffer_id, diagnostics.clone());
|
||||
this.diagnostics.insert(
|
||||
buffer_id,
|
||||
diagnostics
|
||||
.iter()
|
||||
.map(DiagnosticEntryRef::to_owned)
|
||||
.collect(),
|
||||
);
|
||||
false
|
||||
})?;
|
||||
if unchanged {
|
||||
@@ -469,7 +476,7 @@ impl ProjectDiagnosticsEditor {
|
||||
grouped
|
||||
.entry(entry.diagnostic.group_id)
|
||||
.or_default()
|
||||
.push(DiagnosticEntry {
|
||||
.push(DiagnosticEntryRef {
|
||||
range: entry.range.to_point(&buffer_snapshot),
|
||||
diagnostic: entry.diagnostic,
|
||||
})
|
||||
@@ -709,10 +716,6 @@ impl Item for ProjectDiagnosticsEditor {
|
||||
self.editor.for_each_project_item(cx, f)
|
||||
}
|
||||
|
||||
fn is_singleton(&self, _: &App) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn set_nav_history(
|
||||
&mut self,
|
||||
nav_history: ItemNavHistory,
|
||||
|
||||
@@ -863,20 +863,20 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
|
||||
21..=50 => mutated_diagnostics.update_in(cx, |diagnostics, window, cx| {
|
||||
diagnostics.editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(window, cx);
|
||||
if !snapshot.buffer_snapshot.is_empty() {
|
||||
let position = rng.random_range(0..snapshot.buffer_snapshot.len());
|
||||
let position = snapshot.buffer_snapshot.clip_offset(position, Bias::Left);
|
||||
if !snapshot.buffer_snapshot().is_empty() {
|
||||
let position = rng.random_range(0..snapshot.buffer_snapshot().len());
|
||||
let position = snapshot.buffer_snapshot().clip_offset(position, Bias::Left);
|
||||
log::info!(
|
||||
"adding inlay at {position}/{}: {:?}",
|
||||
snapshot.buffer_snapshot.len(),
|
||||
snapshot.buffer_snapshot.text(),
|
||||
snapshot.buffer_snapshot().len(),
|
||||
snapshot.buffer_snapshot().text(),
|
||||
);
|
||||
|
||||
editor.splice_inlays(
|
||||
&[],
|
||||
vec![Inlay::edit_prediction(
|
||||
post_inc(&mut next_inlay_id),
|
||||
snapshot.buffer_snapshot.anchor_before(position),
|
||||
snapshot.buffer_snapshot().anchor_before(position),
|
||||
Rope::from_iter(["Test inlay ", "next_inlay_id"]),
|
||||
)],
|
||||
cx,
|
||||
|
||||
@@ -14,12 +14,14 @@ use workspace::{StatusItemView, ToolbarItemEvent, Workspace, item::ItemHandle};
|
||||
|
||||
use crate::{Deploy, IncludeWarnings, ProjectDiagnosticsEditor};
|
||||
|
||||
/// The status bar item that displays diagnostic counts.
|
||||
pub struct DiagnosticIndicator {
|
||||
summary: project::DiagnosticSummary,
|
||||
active_editor: Option<WeakEntity<Editor>>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
current_diagnostic: Option<Diagnostic>,
|
||||
active_editor: Option<WeakEntity<Editor>>,
|
||||
_observe_active_editor: Option<Subscription>,
|
||||
|
||||
diagnostics_update: Task<()>,
|
||||
diagnostic_summary_update: Task<()>,
|
||||
}
|
||||
@@ -73,10 +75,9 @@ impl Render for DiagnosticIndicator {
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.go_to_next_diagnostic(window, cx);
|
||||
}))
|
||||
.into_any_element(),
|
||||
.on_click(
|
||||
cx.listener(|this, _, window, cx| this.go_to_next_diagnostic(window, cx)),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
@@ -177,7 +178,8 @@ impl DiagnosticIndicator {
|
||||
.filter(|entry| !entry.range.is_empty())
|
||||
.min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
|
||||
.map(|entry| entry.diagnostic);
|
||||
if new_diagnostic != self.current_diagnostic {
|
||||
if new_diagnostic != self.current_diagnostic.as_ref() {
|
||||
let new_diagnostic = new_diagnostic.cloned();
|
||||
self.diagnostics_update =
|
||||
cx.spawn_in(window, async move |diagnostics_indicator, cx| {
|
||||
cx.background_executor()
|
||||
|
||||
@@ -75,12 +75,9 @@ impl Render for ToolbarControls {
|
||||
&ToggleDiagnosticsRefresh,
|
||||
))
|
||||
.on_click(cx.listener(move |toolbar_controls, _, _, cx| {
|
||||
match toolbar_controls.editor() {
|
||||
Some(editor) => {
|
||||
editor.stop_updating(cx);
|
||||
cx.notify();
|
||||
}
|
||||
None => {}
|
||||
if let Some(editor) = toolbar_controls.editor() {
|
||||
editor.stop_updating(cx);
|
||||
cx.notify();
|
||||
}
|
||||
})),
|
||||
)
|
||||
@@ -95,11 +92,10 @@ impl Render for ToolbarControls {
|
||||
&ToggleDiagnosticsRefresh,
|
||||
))
|
||||
.on_click(cx.listener({
|
||||
move |toolbar_controls, _, window, cx| match toolbar_controls
|
||||
.editor()
|
||||
{
|
||||
Some(editor) => editor.refresh_diagnostics(window, cx),
|
||||
None => {}
|
||||
move |toolbar_controls, _, window, cx| {
|
||||
if let Some(editor) = toolbar_controls.editor() {
|
||||
editor.refresh_diagnostics(window, cx)
|
||||
}
|
||||
}
|
||||
})),
|
||||
)
|
||||
@@ -110,9 +106,10 @@ impl Render for ToolbarControls {
|
||||
.icon_color(warning_color)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(Tooltip::text(warning_tooltip))
|
||||
.on_click(cx.listener(|this, _, window, cx| match &this.editor {
|
||||
Some(editor) => editor.toggle_warnings(window, cx),
|
||||
None => {}
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
if let Some(editor) = &this.editor {
|
||||
editor.toggle_warnings(window, cx)
|
||||
}
|
||||
})),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -23,6 +23,7 @@ itertools.workspace = true
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
ordered-float.workspace = true
|
||||
postage.workspace = true
|
||||
project.workspace = true
|
||||
regex.workspace = true
|
||||
serde.workspace = true
|
||||
|
||||
@@ -55,6 +55,13 @@ impl Declaration {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_file(&self) -> Option<&FileDeclaration> {
|
||||
match self {
|
||||
Declaration::Buffer { .. } => None,
|
||||
Declaration::File { declaration, .. } => Some(declaration),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn project_entry_id(&self) -> ProjectEntryId {
|
||||
match self {
|
||||
Declaration::File {
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
use cloud_llm_client::predict_edits_v3::DeclarationScoreComponents;
|
||||
use collections::HashMap;
|
||||
use itertools::Itertools as _;
|
||||
use language::BufferSnapshot;
|
||||
use ordered_float::OrderedFloat;
|
||||
use serde::Serialize;
|
||||
use std::{cmp::Reverse, collections::HashMap, ops::Range};
|
||||
use std::{cmp::Reverse, ops::Range};
|
||||
use strum::EnumIter;
|
||||
use text::{Point, ToPoint};
|
||||
|
||||
@@ -251,6 +252,7 @@ fn score_declaration(
|
||||
pub struct DeclarationScores {
|
||||
pub signature: f32,
|
||||
pub declaration: f32,
|
||||
pub retrieval: f32,
|
||||
}
|
||||
|
||||
impl DeclarationScores {
|
||||
@@ -258,7 +260,7 @@ impl DeclarationScores {
|
||||
// TODO: handle truncation
|
||||
|
||||
// Score related to how likely this is the correct declaration, range 0 to 1
|
||||
let accuracy_score = if components.is_same_file {
|
||||
let retrieval = if components.is_same_file {
|
||||
// TODO: use declaration_line_distance_rank
|
||||
1.0 / components.same_file_declaration_count as f32
|
||||
} else {
|
||||
@@ -274,13 +276,14 @@ impl DeclarationScores {
|
||||
};
|
||||
|
||||
// For now, instead of a linear combination, the scores are just multiplied together.
|
||||
let combined_score = 10.0 * accuracy_score * distance_score;
|
||||
let combined_score = 10.0 * retrieval * distance_score;
|
||||
|
||||
DeclarationScores {
|
||||
signature: combined_score * components.excerpt_vs_signature_weighted_overlap,
|
||||
// declaration score gets boosted both by being multiplied by 2 and by there being more
|
||||
// weighted overlap.
|
||||
declaration: 2.0 * combined_score * components.excerpt_vs_item_weighted_overlap,
|
||||
retrieval,
|
||||
}
|
||||
}
|
||||
}
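A worked example of the scoring above, with all inputs invented: a same-file identifier with four candidate declarations gives retrieval = 1.0 / 4 = 0.25; with a hypothetical distance_score of 0.4, combined_score = 10.0 * 0.25 * 0.4 = 1.0, so a signature overlap of 0.3 yields signature = 0.3 and an item overlap of 0.2 yields declaration = 2.0 * 1.0 * 0.2 = 0.4.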
|
||||
|
||||
@@ -4,8 +4,11 @@ mod excerpt;
mod outline;
mod reference;
mod syntax_index;
mod text_similarity;
pub mod text_similarity;

use std::sync::Arc;

use collections::HashMap;
use gpui::{App, AppContext as _, Entity, Task};
use language::BufferSnapshot;
use text::{Point, ToOffset as _};
@@ -33,8 +36,10 @@ impl EditPredictionContext {
cx: &mut App,
) -> Task<Option<Self>> {
if let Some(syntax_index) = syntax_index {
let index_state = syntax_index.read_with(cx, |index, _cx| index.state().clone());
let index_state =
syntax_index.read_with(cx, |index, _cx| Arc::downgrade(index.state()));
cx.background_spawn(async move {
let index_state = index_state.upgrade()?;
let index_state = index_state.lock().await;
Self::gather_context(cursor_point, &buffer, &excerpt_options, Some(&index_state))
})
@@ -50,6 +55,26 @@ impl EditPredictionContext {
buffer: &BufferSnapshot,
excerpt_options: &EditPredictionExcerptOptions,
index_state: Option<&SyntaxIndexState>,
) -> Option<Self> {
Self::gather_context_with_references_fn(
cursor_point,
buffer,
excerpt_options,
index_state,
references_in_excerpt,
)
}

pub fn gather_context_with_references_fn(
cursor_point: Point,
buffer: &BufferSnapshot,
excerpt_options: &EditPredictionExcerptOptions,
index_state: Option<&SyntaxIndexState>,
get_references: impl FnOnce(
&EditPredictionExcerpt,
&EditPredictionExcerptText,
&BufferSnapshot,
) -> HashMap<Identifier, Vec<Reference>>,
) -> Option<Self> {
let excerpt = EditPredictionExcerpt::select_from_buffer(
cursor_point,
@@ -73,7 +98,7 @@ impl EditPredictionContext {
let cursor_offset_in_excerpt = cursor_offset_in_file.saturating_sub(excerpt.range.start);

let declarations = if let Some(index_state) = index_state {
let references = references_in_excerpt(&excerpt, &excerpt_text, buffer);
let references = get_references(&excerpt, &excerpt_text, buffer);

scored_declarations(
&index_state,
@@ -237,7 +262,8 @@ mod tests {
let lang_id = lang.id();
language_registry.add(Arc::new(lang));

let index = cx.new(|cx| SyntaxIndex::new(&project, cx));
let file_indexing_parallelism = 2;
let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx));
cx.run_until_parked();

(project, index, lang_id)
@@ -1,5 +1,5 @@
use collections::HashMap;
use language::BufferSnapshot;
use std::collections::HashMap;
use std::ops::Range;
use util::RangeExt;

@@ -8,7 +8,7 @@ use crate::{
excerpt::{EditPredictionExcerpt, EditPredictionExcerptText},
};

#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct Reference {
pub identifier: Identifier,
pub range: Range<usize>,
@@ -26,7 +26,7 @@ pub fn references_in_excerpt(
excerpt_text: &EditPredictionExcerptText,
snapshot: &BufferSnapshot,
) -> HashMap<Identifier, Vec<Reference>> {
let mut references = identifiers_in_range(
let mut references = references_in_range(
excerpt.range.clone(),
excerpt_text.body.as_str(),
ReferenceRegion::Nearby,
@@ -38,7 +38,7 @@ pub fn references_in_excerpt(
.iter()
.zip(excerpt_text.parent_signatures.iter())
{
references.extend(identifiers_in_range(
references.extend(references_in_range(
range.clone(),
text.as_str(),
ReferenceRegion::Breadcrumb,
@@ -46,7 +46,7 @@ pub fn references_in_excerpt(
));
}

let mut identifier_to_references: HashMap<Identifier, Vec<Reference>> = HashMap::new();
let mut identifier_to_references: HashMap<Identifier, Vec<Reference>> = HashMap::default();
for reference in references {
identifier_to_references
.entry(reference.identifier.clone())
@@ -57,7 +57,7 @@ pub fn references_in_excerpt(
}

/// Finds all nodes which have a "variable" match from the highlights query within the offset range.
pub fn identifiers_in_range(
pub fn references_in_range(
range: Range<usize>,
range_text: &str,
reference_region: ReferenceRegion,
@@ -120,7 +120,7 @@ mod test {
use indoc::indoc;
use language::{BufferSnapshot, Language, LanguageConfig, LanguageMatcher, tree_sitter_rust};

use crate::reference::{ReferenceRegion, identifiers_in_range};
use crate::reference::{ReferenceRegion, references_in_range};

#[gpui::test]
fn test_identifier_node_truncated(cx: &mut TestAppContext) {
@@ -136,7 +136,7 @@ mod test {
let buffer = create_buffer(code, cx);

let range = 0..35;
let references = identifiers_in_range(
let references = references_in_range(
range.clone(),
&code[range],
ReferenceRegion::Breadcrumb,
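The rename above (identifiers_in_range to references_in_range) leaves the grouping step intact: every collected reference is bucketed under its identifier. A self-contained sketch of that bucketing, with String and Range<usize> standing in for the crate's Identifier and Reference types (illustrative only):

use std::collections::HashMap;
use std::ops::Range;

fn group_references(references: Vec<(String, Range<usize>)>) -> HashMap<String, Vec<Range<usize>>> {
    let mut by_identifier: HashMap<String, Vec<Range<usize>>> = HashMap::new();
    for (identifier, range) in references {
        // Same entry-then-push pattern used by references_in_excerpt in the diff.
        by_identifier.entry(identifier).or_default().push(range);
    }
    by_identifier
}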
@@ -1,13 +1,18 @@
use anyhow::{Result, anyhow};
use collections::{HashMap, HashSet};
use futures::channel::mpsc;
use futures::lock::Mutex;
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity};
use futures::{FutureExt as _, StreamExt, future};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
use itertools::Itertools;
use language::{Buffer, BufferEvent};
use postage::stream::Stream as _;
use project::buffer_store::{BufferStore, BufferStoreEvent};
use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use project::{PathChange, Project, ProjectEntryId, ProjectPath};
use slotmap::SlotMap;
use std::iter;
use std::ops::Range;
use std::ops::{DerefMut, Range};
use std::sync::Arc;
use text::BufferId;
use util::{RangeExt as _, debug_panic, some_or_debug_panic};
@@ -17,42 +22,60 @@ use crate::declaration::{
};
use crate::outline::declarations_in_buffer;

// TODO
//
// * Also queue / debounce buffer changes. A challenge for this is that use of
// `buffer_declarations_containing_range` assumes that the index is always immediately up to date.
//
// * Add a per language configuration for skipping indexing.

// Potential future improvements:
//
// * Prevent indexing of a large file from blocking the queue.
//
// * Send multiple selected excerpt ranges. Challenge is that excerpt ranges influence which
// references are present and their scores.
//
// * Include single-file worktrees / non visible worktrees? E.g. go to definition that resolves to a
// file in a build dependency. Should not be editable in that case - but how to distinguish the case
// where it should be editable?

// Potential future optimizations:
//
// * Cache of buffers for files
// * Index files on multiple threads in Zed (currently only parallel for the CLI). Adding some kind
// of priority system to the background executor could help - it's single threaded for now to avoid
// interfering with other work.
//
// * Parse files directly instead of loading into a Rope. Make SyntaxMap generic to handle embedded
// languages? Will also need to find line boundaries, but that can be done by scanning characters in
// the flat representation.
// * Parse files directly instead of loading into a Rope.
//
// - This would allow the task handling dirty_files to be done entirely on the background executor.
//
// - Make SyntaxMap generic to handle embedded languages? Will also need to find line boundaries,
// but that can be done by scanning characters in the flat representation.
//
// * Use something similar to slotmap without key versions.
//
// * Concurrent slotmap
//
// * Use queue for parsing

pub struct SyntaxIndex {
state: Arc<Mutex<SyntaxIndexState>>,
project: WeakEntity<Project>,
initial_file_indexing_done_rx: postage::watch::Receiver<bool>,
}

#[derive(Default)]
pub struct SyntaxIndexState {
declarations: SlotMap<DeclarationId, Declaration>,
identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
files: HashMap<ProjectEntryId, FileState>,
buffers: HashMap<BufferId, BufferState>,
dirty_files: HashMap<ProjectEntryId, ProjectPath>,
dirty_files_tx: mpsc::Sender<()>,
_file_indexing_task: Option<Task<()>>,
}

#[derive(Debug, Default)]
struct FileState {
declarations: Vec<DeclarationId>,
task: Option<Task<()>>,
}

#[derive(Default)]
@@ -62,34 +85,110 @@ struct BufferState {
}

impl SyntaxIndex {
pub fn new(project: &Entity<Project>, cx: &mut Context<Self>) -> Self {
let mut this = Self {
pub fn new(
project: &Entity<Project>,
file_indexing_parallelism: usize,
cx: &mut Context<Self>,
) -> Self {
assert!(file_indexing_parallelism > 0);
let (dirty_files_tx, mut dirty_files_rx) = mpsc::channel::<()>(1);
let (mut initial_file_indexing_done_tx, initial_file_indexing_done_rx) =
postage::watch::channel();

let initial_state = SyntaxIndexState {
declarations: SlotMap::default(),
identifiers: HashMap::default(),
files: HashMap::default(),
buffers: HashMap::default(),
dirty_files: HashMap::default(),
dirty_files_tx,
_file_indexing_task: None,
};
let this = Self {
project: project.downgrade(),
state: Arc::new(Mutex::new(SyntaxIndexState::default())),
state: Arc::new(Mutex::new(initial_state)),
initial_file_indexing_done_rx,
};

let worktree_store = project.read(cx).worktree_store();
cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
.detach();

for worktree in worktree_store
let initial_worktree_snapshots = worktree_store
.read(cx)
.worktrees()
.map(|w| w.read(cx).snapshot())
.collect::<Vec<_>>()
{
for entry in worktree.files(false, 0) {
this.update_file(
entry.id,
ProjectPath {
worktree_id: worktree.id(),
path: entry.path.clone(),
},
cx,
);
}
.collect::<Vec<_>>();
if !initial_worktree_snapshots.is_empty() {
this.state.try_lock().unwrap()._file_indexing_task =
Some(cx.spawn(async move |this, cx| {
let snapshots_file_count = initial_worktree_snapshots
.iter()
.map(|worktree| worktree.file_count())
.sum::<usize>();
let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
let chunk_count = snapshots_file_count.div_ceil(chunk_size);
let file_chunks = initial_worktree_snapshots
.iter()
.flat_map(|worktree| {
let worktree_id = worktree.id();
worktree.files(false, 0).map(move |entry| {
(
entry.id,
ProjectPath {
worktree_id,
path: entry.path.clone(),
},
)
})
})
.chunks(chunk_size);

let mut tasks = Vec::with_capacity(chunk_count);
for chunk in file_chunks.into_iter() {
tasks.push(Self::update_dirty_files(
&this,
chunk.into_iter().collect(),
cx.clone(),
));
}
futures::future::join_all(tasks).await;

log::info!("Finished initial file indexing");
*initial_file_indexing_done_tx.borrow_mut() = true;

let Ok(state) = this.read_with(cx, |this, _cx| this.state.clone()) else {
return;
};
while dirty_files_rx.next().await.is_some() {
let mut state = state.lock().await;
let was_underused = state.dirty_files.capacity() > 255
&& state.dirty_files.len() * 8 < state.dirty_files.capacity();
let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
if was_underused {
state.dirty_files.shrink_to_fit();
}
drop(state);
if dirty_files.is_empty() {
continue;
}

let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
let chunk_count = dirty_files.len().div_ceil(chunk_size);
let mut tasks = Vec::with_capacity(chunk_count);
let chunks = dirty_files.into_iter().chunks(chunk_size);
for chunk in chunks.into_iter() {
tasks.push(Self::update_dirty_files(
&this,
chunk.into_iter().collect(),
cx.clone(),
));
}
futures::future::join_all(tasks).await;
}
}));
}

cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
.detach();

let buffer_store = project.read(cx).buffer_store().clone();
for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
this.register_buffer(&buffer, cx);
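The initial-indexing branch above splits the worktree file list into at most file_indexing_parallelism chunks using div_ceil, so every file is indexed exactly once and the number of spawned tasks never exceeds the configured parallelism. A small standalone sketch of that arithmetic with made-up numbers (not code from the diff):

fn chunking(file_count: usize, parallelism: usize) -> (usize, usize) {
    assert!(file_count > 0 && parallelism > 0);
    // e.g. 10 files with parallelism 3: chunk_size = 4, chunk_count = 3 (chunks of 4, 4, 2).
    let chunk_size = file_count.div_ceil(parallelism);
    let chunk_count = file_count.div_ceil(chunk_size);
    (chunk_size, chunk_count)
}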
@@ -100,6 +199,63 @@ impl SyntaxIndex {
this
}

async fn update_dirty_files(
this: &WeakEntity<Self>,
dirty_files: Vec<(ProjectEntryId, ProjectPath)>,
mut cx: AsyncApp,
) {
for (entry_id, project_path) in dirty_files {
let Ok(task) = this.update(&mut cx, |this, cx| {
this.update_file(entry_id, project_path, cx)
}) else {
return;
};
task.await;
}
}

pub fn wait_for_initial_file_indexing(&self, cx: &App) -> Task<Result<()>> {
if *self.initial_file_indexing_done_rx.borrow() {
Task::ready(Ok(()))
} else {
let mut rx = self.initial_file_indexing_done_rx.clone();
cx.background_spawn(async move {
loop {
match rx.recv().await {
Some(true) => return Ok(()),
Some(false) => {}
None => {
return Err(anyhow!(
"SyntaxIndex dropped while waiting for initial file indexing"
));
}
}
}
})
}
}

pub fn indexed_file_paths(&self, cx: &App) -> Task<Vec<ProjectPath>> {
let state = self.state.clone();
let project = self.project.clone();

cx.spawn(async move |cx| {
let state = state.lock().await;
let Some(project) = project.upgrade() else {
return vec![];
};
project
.read_with(cx, |project, cx| {
state
.files
.keys()
.filter_map(|entry_id| project.path_for_entry(*entry_id, cx))
.collect()
})
.unwrap_or_default()
})
}

fn handle_worktree_store_event(
&mut self,
_worktree_store: Entity<WorktreeStore>,
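Callers that need the index to be warm can await the watch-based readiness task exposed by wait_for_initial_file_indexing before querying it. A hypothetical usage sketch, written as comments because the exact gpui calling convention depends on the surrounding context; the index entity and async cx here are assumptions, not code from this diff:

// Hypothetical caller:
//   let wait = index.read_with(cx, |index, cx| index.wait_for_initial_file_indexing(cx));
//   wait.await?; // Ok(()) once the initial scan finishes; Err if the index was dropped first
//   let paths = index.read_with(cx, |index, cx| index.indexed_file_paths(cx)).await;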
@@ -112,22 +268,27 @@ impl SyntaxIndex {
let state = Arc::downgrade(&self.state);
let worktree_id = *worktree_id;
let updated_entries_set = updated_entries_set.clone();
cx.spawn(async move |this, cx| {
cx.background_spawn(async move {
let Some(state) = state.upgrade() else { return };
let mut state = state.lock().await;
for (path, entry_id, path_change) in updated_entries_set.iter() {
if let PathChange::Removed = path_change {
state.lock().await.files.remove(entry_id);
state.files.remove(entry_id);
state.dirty_files.remove(entry_id);
} else {
let project_path = ProjectPath {
worktree_id,
path: path.clone(),
};
this.update(cx, |this, cx| {
this.update_file(*entry_id, project_path, cx);
})
.ok();
state.dirty_files.insert(*entry_id, project_path);
}
}
match state.dirty_files_tx.try_send(()) {
Err(err) if err.is_disconnected() => {
log::error!("bug: syntax indexing queue is disconnected");
}
_ => {}
}
})
.detach();
}
@@ -177,7 +338,7 @@ impl SyntaxIndex {
.detach();
}

fn register_buffer(&mut self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
fn register_buffer(&self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
let buffer_id = buffer.read(cx).remote_id();
cx.observe_release(buffer, move |this, _buffer, cx| {
this.with_state(cx, move |state| {
@@ -208,8 +369,11 @@ impl SyntaxIndex {
}
}

fn update_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
fn update_buffer(&self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
let buffer = buffer_entity.read(cx);
if buffer.language().is_none() {
return;
}

let Some(project_entry_id) =
project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
@@ -229,70 +393,64 @@ impl SyntaxIndex {
}
});

let parse_task = cx.background_spawn(async move {
let snapshot = snapshot_task.await?;
let rope = snapshot.text.as_rope().clone();
let state = Arc::downgrade(&self.state);
let task = cx.background_spawn(async move {
// TODO: How to handle errors?
let Ok(snapshot) = snapshot_task.await else {
return;
};
let rope = snapshot.text.as_rope();

anyhow::Ok((
declarations_in_buffer(&snapshot)
.into_iter()
.map(|item| {
(
item.parent_index,
BufferDeclaration::from_outline(item, &rope),
)
})
.collect::<Vec<_>>(),
rope,
))
});

let task = cx.spawn({
async move |this, cx| {
let Ok((declarations, rope)) = parse_task.await else {
return;
};

this.update(cx, move |this, cx| {
this.with_state(cx, move |state| {
let buffer_state = state
.buffers
.entry(buffer_id)
.or_insert_with(Default::default);

SyntaxIndexState::remove_buffer_declarations(
&buffer_state.declarations,
&mut state.declarations,
&mut state.identifiers,
);

let mut new_ids = Vec::with_capacity(declarations.len());
state.declarations.reserve(declarations.len());
for (parent_index, mut declaration) in declarations {
declaration.parent = parent_index
.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

let identifier = declaration.identifier.clone();
let declaration_id = state.declarations.insert(Declaration::Buffer {
rope: rope.clone(),
buffer_id,
declaration,
project_entry_id,
});
new_ids.push(declaration_id);

state
.identifiers
.entry(identifier)
.or_default()
.insert(declaration_id);
}

buffer_state.declarations = new_ids;
});
let declarations = declarations_in_buffer(&snapshot)
.into_iter()
.map(|item| {
(
item.parent_index,
BufferDeclaration::from_outline(item, &rope),
)
})
.ok();
.collect::<Vec<_>>();

let Some(state) = state.upgrade() else {
return;
};
let mut state = state.lock().await;
let state = state.deref_mut();

let buffer_state = state
.buffers
.entry(buffer_id)
.or_insert_with(Default::default);

SyntaxIndexState::remove_buffer_declarations(
&buffer_state.declarations,
&mut state.declarations,
&mut state.identifiers,
);

let mut new_ids = Vec::with_capacity(declarations.len());
state.declarations.reserve(declarations.len());
for (parent_index, mut declaration) in declarations {
declaration.parent =
parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

let identifier = declaration.identifier.clone();
let declaration_id = state.declarations.insert(Declaration::Buffer {
rope: rope.clone(),
buffer_id,
declaration,
project_entry_id,
});
new_ids.push(declaration_id);

state
.identifiers
.entry(identifier)
.or_default()
.insert(declaration_id);
}

buffer_state.declarations = new_ids;
});

self.with_state(cx, move |state| {
@@ -309,28 +467,53 @@ impl SyntaxIndex {
entry_id: ProjectEntryId,
project_path: ProjectPath,
cx: &mut Context<Self>,
) {
) -> Task<()> {
let Some(project) = self.project.upgrade() else {
return;
return Task::ready(());
};
let project = project.read(cx);
let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
return;

let language_registry = project.languages();
let Some(available_language) =
language_registry.language_for_file_path(project_path.path.as_std_path())
else {
return Task::ready(());
};
let language = if let Some(Ok(Ok(language))) = language_registry
.load_language(&available_language)
.now_or_never()
{
if language
.grammar()
.is_none_or(|grammar| grammar.outline_config.is_none())
{
return Task::ready(());
}
future::Either::Left(async { Ok(language) })
} else {
let language_registry = language_registry.clone();
future::Either::Right(async move {
anyhow::Ok(
language_registry
.load_language(&available_language)
.await??,
)
})
};

let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
return Task::ready(());
};
let language_registry = project.languages().clone();

let snapshot_task = worktree.update(cx, |worktree, cx| {
let load_task = worktree.load_file(&project_path.path, cx);
cx.spawn(async move |_this, cx| {
let loaded_file = load_task.await?;
let language = language_registry
.language_for_file_path(&project_path.path.as_std_path())
.await
.ok();
let language = language.await?;

let buffer = cx.new(|cx| {
let mut buffer = Buffer::local(loaded_file.text, cx);
buffer.set_language(language, cx);
buffer.set_language(Some(language), cx);
buffer
})?;
@@ -343,75 +526,58 @@ impl SyntaxIndex {
})
});

let parse_task = cx.background_spawn(async move {
let snapshot = snapshot_task.await?;
let state = Arc::downgrade(&self.state);
cx.background_spawn(async move {
// TODO: How to handle errors?
let Ok(snapshot) = snapshot_task.await else {
return;
};
let rope = snapshot.as_rope();
let declarations = declarations_in_buffer(&snapshot)
.into_iter()
.map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
.collect::<Vec<_>>();
anyhow::Ok(declarations)
});

let task = cx.spawn({
async move |this, cx| {
// TODO: how to handle errors?
let Ok(declarations) = parse_task.await else {
return;
let Some(state) = state.upgrade() else {
return;
};
let mut state = state.lock().await;
let state = state.deref_mut();

let file_state = state.files.entry(entry_id).or_insert_with(Default::default);
for old_declaration_id in &file_state.declarations {
let Some(declaration) = state.declarations.remove(*old_declaration_id) else {
debug_panic!("declaration not found");
continue;
};
this.update(cx, |this, cx| {
this.with_state(cx, move |state| {
let file_state =
state.files.entry(entry_id).or_insert_with(Default::default);

for old_declaration_id in &file_state.declarations {
let Some(declaration) = state.declarations.remove(*old_declaration_id)
else {
debug_panic!("declaration not found");
continue;
};
if let Some(identifier_declarations) =
state.identifiers.get_mut(declaration.identifier())
{
identifier_declarations.remove(old_declaration_id);
}
}

let mut new_ids = Vec::with_capacity(declarations.len());
state.declarations.reserve(declarations.len());

for (parent_index, mut declaration) in declarations {
declaration.parent = parent_index
.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

let identifier = declaration.identifier.clone();
let declaration_id = state.declarations.insert(Declaration::File {
project_entry_id: entry_id,
declaration,
});
new_ids.push(declaration_id);

state
.identifiers
.entry(identifier)
.or_default()
.insert(declaration_id);
}

file_state.declarations = new_ids;
});
})
.ok();
if let Some(identifier_declarations) =
state.identifiers.get_mut(declaration.identifier())
{
identifier_declarations.remove(old_declaration_id);
}
}
});

self.with_state(cx, move |state| {
state
.files
.entry(entry_id)
.or_insert_with(Default::default)
.task = Some(task);
});
let mut new_ids = Vec::with_capacity(declarations.len());
state.declarations.reserve(declarations.len());
for (parent_index, mut declaration) in declarations {
declaration.parent =
parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

let identifier = declaration.identifier.clone();
let declaration_id = state.declarations.insert(Declaration::File {
project_entry_id: entry_id,
declaration,
});
new_ids.push(declaration_id);

state
.identifiers
.entry(identifier)
.or_default()
.insert(declaration_id);
}
file_state.declarations = new_ids;
})
}
}
@@ -576,13 +742,13 @@ mod tests {
let decls = index_state.declarations_for_identifier::<8>(&main);
assert_eq!(decls.len(), 2);

let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
assert_eq!(decl.identifier, main.clone());
assert_eq!(decl.item_range, 32..280);

let decl = expect_file_decl("a.rs", &decls[1].1, &project, cx);
let decl = expect_file_decl("a.rs", &decls[0].1, &project, cx);
assert_eq!(decl.identifier, main);
assert_eq!(decl.item_range, 0..98);

let decl = expect_file_decl("c.rs", &decls[1].1, &project, cx);
assert_eq!(decl.identifier, main.clone());
assert_eq!(decl.item_range, 32..280);
});
}

@@ -718,8 +884,8 @@ mod tests {
cx.update(|cx| {
let decls = index_state.declarations_for_identifier::<8>(&main);
assert_eq!(decls.len(), 2);
expect_file_decl("c.rs", &decls[0].1, &project, cx);
expect_file_decl("a.rs", &decls[1].1, &project, cx);
expect_file_decl("a.rs", &decls[0].1, &project, cx);
expect_file_decl("c.rs", &decls[1].1, &project, cx);
});
}

@@ -852,7 +1018,8 @@ mod tests {
let lang_id = lang.id();
language_registry.add(Arc::new(lang));

let index = cx.new(|cx| SyntaxIndex::new(&project, cx));
let file_indexing_parallelism = 2;
let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx));
cx.run_until_parked();

(project, index, lang_id)
@@ -1,9 +1,10 @@
use std::path::PathBuf;

use anyhow::Context as _;
use gpui::{App, Context, Entity, Window};
use language::Language;
use project::lsp_store::lsp_ext_command::SwitchSourceHeaderResult;
use rpc::proto;
use url::Url;
use util::paths::PathStyle;
use workspace::{OpenOptions, OpenVisible};

@@ -77,16 +78,17 @@ pub fn switch_source_header(
return Ok(());
}

let goto = Url::parse(&switch_source_header.0).with_context(|| {
format!(
"Parsing URL \"{}\" returned from switch source/header failed",
switch_source_header.0
)
})?;
let goto = switch_source_header
.0
.strip_prefix("file://")
.with_context(|| {
format!(
"Parsing file url \"{}\" returned from switch source/header failed",
switch_source_header.0
)
})?;

let path = goto
.to_file_path()
.map_err(|()| anyhow::anyhow!("URL conversion to file path failed for \"{goto}\""))?;
let path = PathBuf::from(goto);

workspace
.update_in(cx, |workspace, window, cx| {
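The switch-source/header hunk above drops the url crate round-trip and simply strips the file:// scheme before building a PathBuf. A reduced sketch of the new conversion, assuming the language server returns a plain file:// URI (note that this approach, unlike Url::to_file_path, does not decode percent-escapes):

use std::path::PathBuf;

fn file_uri_to_path(uri: &str) -> Option<PathBuf> {
    // Mirrors the diff: remove the scheme prefix and treat the remainder as a filesystem path.
    uri.strip_prefix("file://").map(PathBuf::from)
}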
@@ -170,20 +170,15 @@ impl DisplayMap {
let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
let edits = self.buffer_subscription.consume().into_inner();
let (inlay_snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
let (fold_snapshot, edits) = self.fold_map.read(inlay_snapshot.clone(), edits);
let (fold_snapshot, edits) = self.fold_map.read(inlay_snapshot, edits);
let tab_size = Self::tab_size(&self.buffer, cx);
let (tab_snapshot, edits) = self.tab_map.sync(fold_snapshot.clone(), edits, tab_size);
let (tab_snapshot, edits) = self.tab_map.sync(fold_snapshot, edits, tab_size);
let (wrap_snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(tab_snapshot.clone(), edits, cx));
let block_snapshot = self.block_map.read(wrap_snapshot.clone(), edits).snapshot;
.update(cx, |map, cx| map.sync(tab_snapshot, edits, cx));
let block_snapshot = self.block_map.read(wrap_snapshot, edits).snapshot;

DisplaySnapshot {
buffer_snapshot: self.buffer.read(cx).snapshot(cx),
fold_snapshot,
inlay_snapshot,
tab_snapshot,
wrap_snapshot,
block_snapshot,
diagnostics_max_severity: self.diagnostics_max_severity,
crease_snapshot: self.crease_map.snapshot(),
@@ -198,10 +193,10 @@ impl DisplayMap {
pub fn set_state(&mut self, other: &DisplaySnapshot, cx: &mut Context<Self>) {
self.fold(
other
.folds_in_range(0..other.buffer_snapshot.len())
.folds_in_range(0..other.buffer_snapshot().len())
.map(|fold| {
Crease::simple(
fold.range.to_offset(&other.buffer_snapshot),
fold.range.to_offset(other.buffer_snapshot()),
fold.placeholder.clone(),
)
})
@@ -762,12 +757,7 @@ impl<'a> HighlightedChunk<'a> {

#[derive(Clone)]
pub struct DisplaySnapshot {
pub buffer_snapshot: MultiBufferSnapshot,
pub fold_snapshot: FoldSnapshot,
pub crease_snapshot: CreaseSnapshot,
inlay_snapshot: InlaySnapshot,
tab_snapshot: TabSnapshot,
wrap_snapshot: WrapSnapshot,
block_snapshot: BlockSnapshot,
text_highlights: TextHighlights,
inlay_highlights: InlayHighlights,
@@ -776,15 +766,44 @@ pub struct DisplaySnapshot {
diagnostics_max_severity: DiagnosticSeverity,
pub(crate) fold_placeholder: FoldPlaceholder,
}

impl DisplaySnapshot {
pub fn wrap_snapshot(&self) -> &WrapSnapshot {
&self.block_snapshot.wrap_snapshot
}
pub fn tab_snapshot(&self) -> &TabSnapshot {
&self.block_snapshot.wrap_snapshot.tab_snapshot
}

pub fn fold_snapshot(&self) -> &FoldSnapshot {
&self.block_snapshot.wrap_snapshot.tab_snapshot.fold_snapshot
}

pub fn inlay_snapshot(&self) -> &InlaySnapshot {
&self
.block_snapshot
.wrap_snapshot
.tab_snapshot
.fold_snapshot
.inlay_snapshot
}

pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
&self
.block_snapshot
.wrap_snapshot
.tab_snapshot
.fold_snapshot
.inlay_snapshot
.buffer
}

#[cfg(test)]
pub fn fold_count(&self) -> usize {
self.fold_snapshot.fold_count()
self.fold_snapshot().fold_count()
}

pub fn is_empty(&self) -> bool {
self.buffer_snapshot.len() == 0
self.buffer_snapshot().len() == 0
}

pub fn row_infos(&self, start_row: DisplayRow) -> impl Iterator<Item = RowInfo> + '_ {
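The accessor methods added above recover each intermediate snapshot from the block snapshot instead of keeping separate copies on DisplaySnapshot; each layer owns the one beneath it. A simplified schematic of that ownership chain, inferred from the field paths in the accessors (the "Sketch" structs are illustrative, not the real definitions):

// block -> wrap -> tab -> fold -> inlay -> buffer, as walked by the accessors above.
struct BlockSnapshotSketch { wrap_snapshot: WrapSnapshotSketch }
struct WrapSnapshotSketch { tab_snapshot: TabSnapshotSketch }
struct TabSnapshotSketch { fold_snapshot: FoldSnapshotSketch }
struct FoldSnapshotSketch { inlay_snapshot: InlaySnapshotSketch }
struct InlaySnapshotSketch { buffer: MultiBufferSnapshotSketch }
struct MultiBufferSnapshotSketch;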
@@ -792,16 +811,16 @@ impl DisplaySnapshot {
}

pub fn widest_line_number(&self) -> u32 {
self.buffer_snapshot.widest_line_number()
self.buffer_snapshot().widest_line_number()
}

pub fn prev_line_boundary(&self, mut point: MultiBufferPoint) -> (Point, DisplayPoint) {
loop {
let mut inlay_point = self.inlay_snapshot.to_inlay_point(point);
let mut fold_point = self.fold_snapshot.to_fold_point(inlay_point, Bias::Left);
let mut inlay_point = self.inlay_snapshot().to_inlay_point(point);
let mut fold_point = self.fold_snapshot().to_fold_point(inlay_point, Bias::Left);
fold_point.0.column = 0;
inlay_point = fold_point.to_inlay_point(&self.fold_snapshot);
point = self.inlay_snapshot.to_buffer_point(inlay_point);
inlay_point = fold_point.to_inlay_point(self.fold_snapshot());
point = self.inlay_snapshot().to_buffer_point(inlay_point);

let mut display_point = self.point_to_display_point(point, Bias::Left);
*display_point.column_mut() = 0;
@@ -819,11 +838,11 @@ impl DisplaySnapshot {
) -> (MultiBufferPoint, DisplayPoint) {
let original_point = point;
loop {
let mut inlay_point = self.inlay_snapshot.to_inlay_point(point);
let mut fold_point = self.fold_snapshot.to_fold_point(inlay_point, Bias::Right);
fold_point.0.column = self.fold_snapshot.line_len(fold_point.row());
inlay_point = fold_point.to_inlay_point(&self.fold_snapshot);
point = self.inlay_snapshot.to_buffer_point(inlay_point);
let mut inlay_point = self.inlay_snapshot().to_inlay_point(point);
let mut fold_point = self.fold_snapshot().to_fold_point(inlay_point, Bias::Right);
fold_point.0.column = self.fold_snapshot().line_len(fold_point.row());
inlay_point = fold_point.to_inlay_point(self.fold_snapshot());
point = self.inlay_snapshot().to_buffer_point(inlay_point);

let mut display_point = self.point_to_display_point(point, Bias::Right);
*display_point.column_mut() = self.line_len(display_point.row());
@@ -841,7 +860,8 @@ impl DisplaySnapshot {
let new_end = if range.end.column > 0 {
MultiBufferPoint::new(
range.end.row,
self.buffer_snapshot.line_len(MultiBufferRow(range.end.row)),
self.buffer_snapshot()
.line_len(MultiBufferRow(range.end.row)),
)
} else {
range.end
@@ -851,52 +871,52 @@ impl DisplaySnapshot {
}

pub fn point_to_display_point(&self, point: MultiBufferPoint, bias: Bias) -> DisplayPoint {
let inlay_point = self.inlay_snapshot.to_inlay_point(point);
let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias);
let tab_point = self.tab_snapshot.to_tab_point(fold_point);
let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
let inlay_point = self.inlay_snapshot().to_inlay_point(point);
let fold_point = self.fold_snapshot().to_fold_point(inlay_point, bias);
let tab_point = self.tab_snapshot().to_tab_point(fold_point);
let wrap_point = self.wrap_snapshot().tab_point_to_wrap_point(tab_point);
let block_point = self.block_snapshot.to_block_point(wrap_point);
DisplayPoint(block_point)
}

pub fn display_point_to_point(&self, point: DisplayPoint, bias: Bias) -> Point {
self.inlay_snapshot
self.inlay_snapshot()
.to_buffer_point(self.display_point_to_inlay_point(point, bias))
}

pub fn display_point_to_inlay_offset(&self, point: DisplayPoint, bias: Bias) -> InlayOffset {
self.inlay_snapshot
self.inlay_snapshot()
.to_offset(self.display_point_to_inlay_point(point, bias))
}

pub fn anchor_to_inlay_offset(&self, anchor: Anchor) -> InlayOffset {
self.inlay_snapshot
.to_inlay_offset(anchor.to_offset(&self.buffer_snapshot))
self.inlay_snapshot()
.to_inlay_offset(anchor.to_offset(self.buffer_snapshot()))
}

pub fn display_point_to_anchor(&self, point: DisplayPoint, bias: Bias) -> Anchor {
self.buffer_snapshot
self.buffer_snapshot()
.anchor_at(point.to_offset(self, bias), bias)
}

fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint {
let block_point = point.0;
let wrap_point = self.block_snapshot.to_wrap_point(block_point, bias);
let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
let fold_point = self.tab_snapshot.to_fold_point(tab_point, bias).0;
fold_point.to_inlay_point(&self.fold_snapshot)
let tab_point = self.wrap_snapshot().to_tab_point(wrap_point);
let fold_point = self.tab_snapshot().to_fold_point(tab_point, bias).0;
fold_point.to_inlay_point(self.fold_snapshot())
}

pub fn display_point_to_fold_point(&self, point: DisplayPoint, bias: Bias) -> FoldPoint {
let block_point = point.0;
let wrap_point = self.block_snapshot.to_wrap_point(block_point, bias);
let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
self.tab_snapshot.to_fold_point(tab_point, bias).0
let tab_point = self.wrap_snapshot().to_tab_point(wrap_point);
self.tab_snapshot().to_fold_point(tab_point, bias).0
}

pub fn fold_point_to_display_point(&self, fold_point: FoldPoint) -> DisplayPoint {
let tab_point = self.tab_snapshot.to_tab_point(fold_point);
let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
let tab_point = self.tab_snapshot().to_tab_point(fold_point);
let wrap_point = self.wrap_snapshot().tab_point_to_wrap_point(tab_point);
let block_point = self.block_snapshot.to_block_point(wrap_point);
DisplayPoint(block_point)
}
@@ -1118,7 +1138,7 @@ impl DisplaySnapshot {
}

pub fn buffer_chars_at(&self, mut offset: usize) -> impl Iterator<Item = (char, usize)> + '_ {
self.buffer_snapshot.chars_at(offset).map(move |ch| {
self.buffer_snapshot().chars_at(offset).map(move |ch| {
let ret = (ch, offset);
offset += ch.len_utf8();
ret
@@ -1129,7 +1149,7 @@ impl DisplaySnapshot {
&self,
mut offset: usize,
) -> impl Iterator<Item = (char, usize)> + '_ {
self.buffer_snapshot
self.buffer_snapshot()
.reversed_chars_at(offset)
.map(move |ch| {
offset -= ch.len_utf8();
@@ -1152,11 +1172,11 @@ impl DisplaySnapshot {
pub fn clip_at_line_end(&self, display_point: DisplayPoint) -> DisplayPoint {
let mut point = self.display_point_to_point(display_point, Bias::Left);

if point.column != self.buffer_snapshot.line_len(MultiBufferRow(point.row)) {
if point.column != self.buffer_snapshot().line_len(MultiBufferRow(point.row)) {
return display_point;
}
point.column = point.column.saturating_sub(1);
point = self.buffer_snapshot.clip_point(point, Bias::Left);
point = self.buffer_snapshot().clip_point(point, Bias::Left);
self.point_to_display_point(point, Bias::Left)
}

@@ -1164,7 +1184,7 @@ impl DisplaySnapshot {
where
T: ToOffset,
{
self.fold_snapshot.folds_in_range(range)
self.fold_snapshot().folds_in_range(range)
}

pub fn blocks_in_range(
@@ -1176,7 +1196,7 @@ impl DisplaySnapshot {
.map(|(row, block)| (DisplayRow(row), block))
}

pub fn sticky_header_excerpt(&self, row: f32) -> Option<StickyHeaderExcerpt<'_>> {
pub fn sticky_header_excerpt(&self, row: f64) -> Option<StickyHeaderExcerpt<'_>> {
self.block_snapshot.sticky_header_excerpt(row)
}

@@ -1185,12 +1205,12 @@ impl DisplaySnapshot {
}

pub fn intersects_fold<T: ToOffset>(&self, offset: T) -> bool {
self.fold_snapshot.intersects_fold(offset)
self.fold_snapshot().intersects_fold(offset)
}

pub fn is_line_folded(&self, buffer_row: MultiBufferRow) -> bool {
self.block_snapshot.is_line_replaced(buffer_row)
|| self.fold_snapshot.is_line_folded(buffer_row)
|| self.fold_snapshot().is_line_folded(buffer_row)
}

pub fn is_block_line(&self, display_row: DisplayRow) -> bool {
@@ -1207,7 +1227,7 @@ impl DisplaySnapshot {
.block_snapshot
.to_wrap_point(BlockPoint::new(display_row.0, 0), Bias::Left)
.row();
self.wrap_snapshot.soft_wrap_indent(wrap_row)
self.wrap_snapshot().soft_wrap_indent(wrap_row)
}

pub fn text(&self) -> String {
@@ -1228,7 +1248,7 @@ impl DisplaySnapshot {
}

pub fn line_indent_for_buffer_row(&self, buffer_row: MultiBufferRow) -> LineIndent {
self.buffer_snapshot.line_indent_for_row(buffer_row)
self.buffer_snapshot().line_indent_for_row(buffer_row)
}

pub fn line_len(&self, row: DisplayRow) -> u32 {
@@ -1246,7 +1266,7 @@ impl DisplaySnapshot {
}

pub fn starts_indent(&self, buffer_row: MultiBufferRow) -> bool {
let max_row = self.buffer_snapshot.max_row();
let max_row = self.buffer_snapshot().max_row();
if buffer_row >= max_row {
return false;
}
@@ -1271,10 +1291,11 @@ impl DisplaySnapshot {
}

pub fn crease_for_buffer_row(&self, buffer_row: MultiBufferRow) -> Option<Crease<Point>> {
let start = MultiBufferPoint::new(buffer_row.0, self.buffer_snapshot.line_len(buffer_row));
let start =
MultiBufferPoint::new(buffer_row.0, self.buffer_snapshot().line_len(buffer_row));
if let Some(crease) = self
.crease_snapshot
.query_row(buffer_row, &self.buffer_snapshot)
.query_row(buffer_row, self.buffer_snapshot())
{
match crease {
Crease::Inline {
@@ -1284,7 +1305,7 @@ impl DisplaySnapshot {
render_trailer,
metadata,
} => Some(Crease::Inline {
range: range.to_point(&self.buffer_snapshot),
range: range.to_point(self.buffer_snapshot()),
placeholder: placeholder.clone(),
render_toggle: render_toggle.clone(),
render_trailer: render_trailer.clone(),
@@ -1298,7 +1319,7 @@ impl DisplaySnapshot {
block_priority,
render_toggle,
} => Some(Crease::Block {
range: range.to_point(&self.buffer_snapshot),
range: range.to_point(self.buffer_snapshot()),
block_height: *block_height,
block_style: *block_style,
render_block: render_block.clone(),
@@ -1310,7 +1331,7 @@ impl DisplaySnapshot {
&& !self.is_line_folded(MultiBufferRow(start.row))
{
let start_line_indent = self.line_indent_for_buffer_row(buffer_row);
let max_point = self.buffer_snapshot.max_point();
let max_point = self.buffer_snapshot().max_point();
let mut end = None;

for row in (buffer_row.0 + 1)..=max_point.row {
@@ -1321,7 +1342,7 @@ impl DisplaySnapshot {
let prev_row = row - 1;
end = Some(Point::new(
prev_row,
self.buffer_snapshot.line_len(MultiBufferRow(prev_row)),
self.buffer_snapshot().line_len(MultiBufferRow(prev_row)),
));
break;
}
@@ -1330,7 +1351,7 @@ impl DisplaySnapshot {
let mut row_before_line_breaks = end.unwrap_or(max_point);
while row_before_line_breaks.row > start.row
&& self
.buffer_snapshot
.buffer_snapshot()
.is_line_blank(MultiBufferRow(row_before_line_breaks.row))
{
row_before_line_breaks.row -= 1;
@@ -1338,7 +1359,7 @@ impl DisplaySnapshot {

row_before_line_breaks = Point::new(
row_before_line_breaks.row,
self.buffer_snapshot
self.buffer_snapshot()
.line_len(MultiBufferRow(row_before_line_breaks.row)),
);
@@ -1482,23 +1503,23 @@ impl DisplayPoint {

pub fn to_offset(self, map: &DisplaySnapshot, bias: Bias) -> usize {
let wrap_point = map.block_snapshot.to_wrap_point(self.0, bias);
let tab_point = map.wrap_snapshot.to_tab_point(wrap_point);
let fold_point = map.tab_snapshot.to_fold_point(tab_point, bias).0;
let inlay_point = fold_point.to_inlay_point(&map.fold_snapshot);
map.inlay_snapshot
.to_buffer_offset(map.inlay_snapshot.to_offset(inlay_point))
let tab_point = map.wrap_snapshot().to_tab_point(wrap_point);
let fold_point = map.tab_snapshot().to_fold_point(tab_point, bias).0;
let inlay_point = fold_point.to_inlay_point(map.fold_snapshot());
map.inlay_snapshot()
.to_buffer_offset(map.inlay_snapshot().to_offset(inlay_point))
}
}

impl ToDisplayPoint for usize {
fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint {
map.point_to_display_point(self.to_point(&map.buffer_snapshot), Bias::Left)
map.point_to_display_point(self.to_point(map.buffer_snapshot()), Bias::Left)
}
}

impl ToDisplayPoint for OffsetUtf16 {
fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint {
self.to_offset(&map.buffer_snapshot).to_display_point(map)
self.to_offset(map.buffer_snapshot()).to_display_point(map)
}
}

@@ -1510,7 +1531,7 @@ impl ToDisplayPoint for Point {

impl ToDisplayPoint for Anchor {
fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint {
self.to_point(&map.buffer_snapshot).to_display_point(map)
self.to_point(map.buffer_snapshot()).to_display_point(map)
}
}
@@ -1599,10 +1620,10 @@ pub mod tests {
let mut blocks = Vec::new();

let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
log::info!("buffer text: {:?}", snapshot.buffer_snapshot().text());
log::info!("fold text: {:?}", snapshot.fold_snapshot().text());
log::info!("tab text: {:?}", snapshot.tab_snapshot().text());
log::info!("wrap text: {:?}", snapshot.wrap_snapshot().text());
log::info!("block text: {:?}", snapshot.block_snapshot.text());
log::info!("display text: {:?}", snapshot.text());

@@ -1634,7 +1655,8 @@ pub mod tests {
30..=44 => {
map.update(cx, |map, cx| {
if rng.random() || blocks.is_empty() {
let buffer = map.snapshot(cx).buffer_snapshot;
let snapshot = map.snapshot(cx);
let buffer = snapshot.buffer_snapshot();
let block_properties = (0..rng.random_range(1..=1))
.map(|_| {
let position = buffer.anchor_after(buffer.clip_offset(
@@ -1715,15 +1737,15 @@ pub mod tests {

let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
fold_count = snapshot.fold_count();
log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
log::info!("buffer text: {:?}", snapshot.buffer_snapshot().text());
log::info!("fold text: {:?}", snapshot.fold_snapshot().text());
log::info!("tab text: {:?}", snapshot.tab_snapshot().text());
log::info!("wrap text: {:?}", snapshot.wrap_snapshot().text());
log::info!("block text: {:?}", snapshot.block_snapshot.text());
log::info!("display text: {:?}", snapshot.text());

// Line boundaries
let buffer = &snapshot.buffer_snapshot;
let buffer = snapshot.buffer_snapshot();
for _ in 0..5 {
let row = rng.random_range(0..=buffer.max_point().row);
let column = rng.random_range(0..=buffer.line_len(MultiBufferRow(row)));
@@ -1877,37 +1899,37 @@ pub mod tests {
),
(
DisplayPoint::new(DisplayRow(0), 7),
language::SelectionGoal::HorizontalPosition(x.0)
language::SelectionGoal::HorizontalPosition(f64::from(x))
)
);
assert_eq!(
movement::down(
&snapshot,
DisplayPoint::new(DisplayRow(0), 7),
language::SelectionGoal::HorizontalPosition(x.0),
language::SelectionGoal::HorizontalPosition(f64::from(x)),
false,
&text_layout_details
),
(
DisplayPoint::new(DisplayRow(1), 10),
language::SelectionGoal::HorizontalPosition(x.0)
language::SelectionGoal::HorizontalPosition(f64::from(x))
)
);
assert_eq!(
movement::down(
&snapshot,
DisplayPoint::new(DisplayRow(1), 10),
language::SelectionGoal::HorizontalPosition(x.0),
language::SelectionGoal::HorizontalPosition(f64::from(x)),
false,
&text_layout_details
),
(
DisplayPoint::new(DisplayRow(2), 4),
language::SelectionGoal::HorizontalPosition(x.0)
language::SelectionGoal::HorizontalPosition(f64::from(x))
)
);

let ix = snapshot.buffer_snapshot.text().find("seven").unwrap();
let ix = snapshot.buffer_snapshot().text().find("seven").unwrap();
buffer.update(cx, |buffer, cx| {
buffer.edit([(ix..ix, "and ")], None, cx);
});
@@ -1920,7 +1942,7 @@ pub mod tests {

// Re-wrap on font size changes
map.update(cx, |map, cx| {
map.set_font(font("Helvetica"), px(font_size.0 + 3.), cx)
map.set_font(font("Helvetica"), font_size + Pixels::from(3.), cx)
});

let snapshot = map.update(cx, |map, cx| map.snapshot(cx));