Compare commits
266 Commits
debug-view
...
vim-set-ic
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
167d2e9ac4 | ||
|
|
0219e0ed72 | ||
|
|
80989d6767 | ||
|
|
719013dae6 | ||
|
|
8af3f583c2 | ||
|
|
f1d80b715a | ||
|
|
42ef3e5d3d | ||
|
|
90ea252c82 | ||
|
|
6e5ff6d091 | ||
|
|
04216a88f3 | ||
|
|
3ae65153db | ||
|
|
ffc9060607 | ||
|
|
4fc4707cfc | ||
|
|
8662025d12 | ||
|
|
ceddd5752a | ||
|
|
20166727a6 | ||
|
|
6e80fca0d5 | ||
|
|
778ca84f85 | ||
|
|
ebdc0572c6 | ||
|
|
cda48a3a1c | ||
|
|
b7f9fd7d74 | ||
|
|
98ab118526 | ||
|
|
1e70a1a4ce | ||
|
|
163219af35 | ||
|
|
f96fd928d7 | ||
|
|
9aa5817b85 | ||
|
|
28cc39ad56 | ||
|
|
0da3f9ffda | ||
|
|
f2efe78feb | ||
|
|
ed7217ff46 | ||
|
|
f9fb389f86 | ||
|
|
632e569c5f | ||
|
|
0c71aa9f01 | ||
|
|
92a09ecf25 | ||
|
|
bad96776cd | ||
|
|
aa14980523 | ||
|
|
12aba6193e | ||
|
|
720971e47b | ||
|
|
0a10e3e264 | ||
|
|
77854f4627 | ||
|
|
5ce7eda8d2 | ||
|
|
6d7a4c441b | ||
|
|
cc85a48de5 | ||
|
|
4cd839e352 | ||
|
|
78098f6809 | ||
|
|
4d2ff6c899 | ||
|
|
6f5d1522cb | ||
|
|
682cf023ca | ||
|
|
72948e14ee | ||
|
|
a063a70cfb | ||
|
|
687e22b4c3 | ||
|
|
e13b88e4bd | ||
|
|
e1e9f78dc3 | ||
|
|
0fe696bc7c | ||
|
|
ead38fd1be | ||
|
|
fbdf5d4df4 | ||
|
|
837f282f1e | ||
|
|
bd3cccea15 | ||
|
|
d437bbaa0a | ||
|
|
114791e1a8 | ||
|
|
d6fcd404af | ||
|
|
7ad9ca9bcc | ||
|
|
a55dff7834 | ||
|
|
6db621a1ed | ||
|
|
948b4379df | ||
|
|
8db24dd8ad | ||
|
|
4aac5642c1 | ||
|
|
30b49cfbf5 | ||
|
|
c69912c76a | ||
|
|
7f14ab26dd | ||
|
|
5ee73d3e3c | ||
|
|
d5aa81a5b2 | ||
|
|
21855c15e4 | ||
|
|
1f9279a56f | ||
|
|
da71465437 | ||
|
|
bcc8149263 | ||
|
|
b1528601cc | ||
|
|
ee357e8987 | ||
|
|
0891a7142d | ||
|
|
94fe862fb6 | ||
|
|
4f91fab190 | ||
|
|
0e0f48d8e1 | ||
|
|
7980dbdaea | ||
|
|
a5683f3541 | ||
|
|
67984d5e49 | ||
|
|
d83d7d35cb | ||
|
|
6470443271 | ||
|
|
5b72dfff87 | ||
|
|
495a7b0a84 | ||
|
|
301e976465 | ||
|
|
daebc4052d | ||
|
|
ecc35fcd9a | ||
|
|
236006b6b3 | ||
|
|
39c4480841 | ||
|
|
48aac2a746 | ||
|
|
ae036f8ead | ||
|
|
de1de25712 | ||
|
|
18fc951135 | ||
|
|
40138e12a4 | ||
|
|
e7a5c81b07 | ||
|
|
d98175c0a6 | ||
|
|
bc7d804a42 | ||
|
|
50bb8a4ae6 | ||
|
|
b2b90b003d | ||
|
|
c0f56f500e | ||
|
|
a9fe18f4cb | ||
|
|
3c5e683fbe | ||
|
|
783ba389f7 | ||
|
|
e72021a26b | ||
|
|
f25ace6be0 | ||
|
|
c627543b46 | ||
|
|
f303a461c4 | ||
|
|
a9def8128f | ||
|
|
6580eac077 | ||
|
|
5c3c79d667 | ||
|
|
16fccb5c76 | ||
|
|
a25504edaf | ||
|
|
bc11844b2e | ||
|
|
10b99c6f55 | ||
|
|
17dea24533 | ||
|
|
17e55daf6f | ||
|
|
6b968e0118 | ||
|
|
0f66310192 | ||
|
|
26adc70ae6 | ||
|
|
a5fb290252 | ||
|
|
8fc7bd9ae8 | ||
|
|
7167be5889 | ||
|
|
d321cf93ba | ||
|
|
ce7b02e3a1 | ||
|
|
03f9cf4414 | ||
|
|
3c626f3758 | ||
|
|
4a1bab52f3 | ||
|
|
91b0f42382 | ||
|
|
523c042930 | ||
|
|
ed7bd5a8ed | ||
|
|
8ebe4fa149 | ||
|
|
6b646e3a14 | ||
|
|
e653cc90c5 | ||
|
|
0794de71e3 | ||
|
|
2b283e7c53 | ||
|
|
45a4277026 | ||
|
|
fa76b6ce06 | ||
|
|
a13e3a8af3 | ||
|
|
39370bceb2 | ||
|
|
53885c00d3 | ||
|
|
6f3e66d027 | ||
|
|
b3f9be6e9c | ||
|
|
4353b61155 | ||
|
|
e1b57f00a0 | ||
|
|
c5219e8fd2 | ||
|
|
5612a961b0 | ||
|
|
c53e5ba397 | ||
|
|
d5a99d079e | ||
|
|
9418a2f4bc | ||
|
|
880fff471c | ||
|
|
5f6ae2361f | ||
|
|
5d89b2ea26 | ||
|
|
0f7dbf57f5 | ||
|
|
b60f19f71e | ||
|
|
0a261ad8d0 | ||
|
|
28ed08340c | ||
|
|
74fe3b17f7 | ||
|
|
9112554262 | ||
|
|
3b79490e8f | ||
|
|
52c467ea3a | ||
|
|
831de8e48f | ||
|
|
bc528411df | ||
|
|
9ac511e47c | ||
|
|
afaed3af62 | ||
|
|
f78699eb71 | ||
|
|
3646aa6bba | ||
|
|
dc20a41e0d | ||
|
|
6a24ad7d39 | ||
|
|
8fefd793f0 | ||
|
|
f6e2a2a808 | ||
|
|
3cf6fa8f61 | ||
|
|
2759f541da | ||
|
|
809d3bfe00 | ||
|
|
0aad47493e | ||
|
|
271d67f7ad | ||
|
|
2e87387e53 | ||
|
|
15e75bdf04 | ||
|
|
3ac14e15bb | ||
|
|
9e7302520e | ||
|
|
1bf8332333 | ||
|
|
d8048f46ee | ||
|
|
edb804de5a | ||
|
|
691bfe71db | ||
|
|
1d5da68560 | ||
|
|
f07bc12aed | ||
|
|
4532765ae8 | ||
|
|
25a1827456 | ||
|
|
98865a3ff2 | ||
|
|
681a4adc42 | ||
|
|
5e502a32fb | ||
|
|
e9fbcf5abf | ||
|
|
c9e3b32366 | ||
|
|
e9abd5b28b | ||
|
|
a90abb1009 | ||
|
|
46d19d8a47 | ||
|
|
e484f49ee8 | ||
|
|
80dcabe95c | ||
|
|
e602cfadd3 | ||
|
|
d4adb51553 | ||
|
|
a0514af589 | ||
|
|
c88fdaf02d | ||
|
|
003163eb4f | ||
|
|
9e64b7b911 | ||
|
|
d4fd59f0a2 | ||
|
|
4e6e424fd7 | ||
|
|
dccbb47fbc | ||
|
|
b97843ea02 | ||
|
|
fbe06238e4 | ||
|
|
e0028fbef2 | ||
|
|
1bbf98aea6 | ||
|
|
8bac1bee7a | ||
|
|
55dc9ff7ca | ||
|
|
50bd8bc255 | ||
|
|
a2c71d3d20 | ||
|
|
79620454d0 | ||
|
|
271771c742 | ||
|
|
891a06c294 | ||
|
|
11041ef3b0 | ||
|
|
839c216620 | ||
|
|
18df6a81b4 | ||
|
|
f5c2e4b49e | ||
|
|
1d1bbf01a9 | ||
|
|
ffa23d25e3 | ||
|
|
782058647d | ||
|
|
be77682a3f | ||
|
|
8df616e28b | ||
|
|
89520ea221 | ||
|
|
de75e2d9f6 | ||
|
|
4e316c683b | ||
|
|
1afbfcb832 | ||
|
|
be7575536e | ||
|
|
30a29ab34e | ||
|
|
b9188e0fd3 | ||
|
|
df6f0bc2a7 | ||
|
|
4743fe8415 | ||
|
|
0f4bdca9e9 | ||
|
|
154b01c5fe | ||
|
|
b6944d0bae | ||
|
|
94fcbb400b | ||
|
|
2e97ef32c4 | ||
|
|
aa5b99dc11 | ||
|
|
3217bcb83e | ||
|
|
a3da66cec0 | ||
|
|
9e6f1d5a6e | ||
|
|
430ac5175f | ||
|
|
5f728efccf | ||
|
|
194a13ffb5 | ||
|
|
66f2fda625 | ||
|
|
e62dd2a0e5 | ||
|
|
c826ce6fc6 | ||
|
|
e5e308ba78 | ||
|
|
166b2352f3 | ||
|
|
f18b19a73e | ||
|
|
b09764c54a | ||
|
|
5f4f0a873e | ||
|
|
82e1e5b7ac | ||
|
|
530225a06a | ||
|
|
11212b80f9 | ||
|
|
e3e0522e32 | ||
|
|
fc0eb882f7 | ||
|
|
34c96c618e |
@@ -10,3 +10,15 @@
|
||||
# Here, we opted to use `[target.'cfg(all())']` instead of `[build]` because `[target.'**']` is guaranteed to be cumulative.
|
||||
[target.'cfg(all())']
|
||||
rustflags = ["-D", "warnings"]
|
||||
|
||||
# Use Mold on Linux, because it's faster than GNU ld and LLD.
|
||||
#
|
||||
# We no longer set this in the default `config.toml` so that developers can opt in to Wild, which
|
||||
# is faster than Mold, in their own ~/.cargo/config.toml.
|
||||
[target.x86_64-unknown-linux-gnu]
|
||||
linker = "clang"
|
||||
rustflags = ["-C", "link-arg=-fuse-ld=mold"]
|
||||
|
||||
[target.aarch64-unknown-linux-gnu]
|
||||
linker = "clang"
|
||||
rustflags = ["-C", "link-arg=-fuse-ld=mold"]
|
||||
|
||||
@@ -4,14 +4,9 @@ rustflags = ["-C", "symbol-mangling-version=v0", "--cfg", "tokio_unstable"]
|
||||
|
||||
[alias]
|
||||
xtask = "run --package xtask --"
|
||||
|
||||
[target.x86_64-unknown-linux-gnu]
|
||||
linker = "clang"
|
||||
rustflags = ["-C", "link-arg=-fuse-ld=mold"]
|
||||
|
||||
[target.aarch64-unknown-linux-gnu]
|
||||
linker = "clang"
|
||||
rustflags = ["-C", "link-arg=-fuse-ld=mold"]
|
||||
perf-test = ["test", "--profile", "release-fast", "--lib", "--bins", "--tests", "--all-features", "--config", "target.'cfg(true)'.runner='cargo run -p perf --release'", "--config", "target.'cfg(true)'.rustflags=[\"--cfg\", \"perf_enabled\"]"]
|
||||
# Keep similar flags here to share some ccache
|
||||
perf-compare = ["run", "--profile", "release-fast", "-p", "perf", "--config", "target.'cfg(true)'.rustflags=[\"--cfg\", \"perf_enabled\"]", "--", "compare"]
|
||||
|
||||
[target.'cfg(target_os = "windows")']
|
||||
rustflags = [
|
||||
|
||||
@@ -26,7 +26,7 @@ third-party = [
|
||||
# build of remote_server should not include scap / its x11 dependency
|
||||
{ name = "scap", git = "https://github.com/zed-industries/scap", rev = "808aa5c45b41e8f44729d02e38fd00a2fe2722e7" },
|
||||
# build of remote_server should not need to include on libalsa through rodio
|
||||
{ name = "rodio", git = "https://github.com/RustAudio/rodio", branch = "better_wav_output"},
|
||||
{ name = "rodio", git = "https://github.com/RustAudio/rodio" },
|
||||
]
|
||||
|
||||
[final-excludes]
|
||||
|
||||
48
.github/workflows/community_champion_auto_labeler.yml
vendored
Normal file
48
.github/workflows/community_champion_auto_labeler.yml
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
name: Community Champion Auto Labeler
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
pull_request_target:
|
||||
types: [opened]
|
||||
|
||||
jobs:
|
||||
label_community_champion:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check if author is a community champion and apply label
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const communityChampionBody = `${{ secrets.COMMUNITY_CHAMPIONS }}`;
|
||||
|
||||
const communityChampions = communityChampionBody
|
||||
.split('\n')
|
||||
.map(handle => handle.trim().toLowerCase());
|
||||
|
||||
let author;
|
||||
if (context.eventName === 'issues') {
|
||||
author = context.payload.issue.user.login;
|
||||
} else if (context.eventName === 'pull_request_target') {
|
||||
author = context.payload.pull_request.user.login;
|
||||
}
|
||||
|
||||
if (!author || !communityChampions.includes(author.toLowerCase())) {
|
||||
return;
|
||||
}
|
||||
|
||||
const issueNumber = context.payload.issue?.number || context.payload.pull_request?.number;
|
||||
|
||||
try {
|
||||
await github.rest.issues.addLabels({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: issueNumber,
|
||||
labels: ['community champion']
|
||||
});
|
||||
|
||||
console.log(`Applied 'community champion' label to #${issueNumber} by ${author}`);
|
||||
} catch (error) {
|
||||
console.error(`Failed to apply label: ${error.message}`);
|
||||
}
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -20,6 +20,7 @@
|
||||
.venv
|
||||
.vscode
|
||||
.wrangler
|
||||
.perf-runs
|
||||
/assets/*licenses.*
|
||||
/crates/collab/seed.json
|
||||
/crates/theme/schemas/theme.json
|
||||
|
||||
@@ -63,6 +63,7 @@ Although there are few hard and fast rules, typically we don't merge:
|
||||
- New file icons. Zed's default icon theme consists of icons that are hand-designed to fit together in a cohesive manner, please don't submit PRs with off-the-shelf SVGs.
|
||||
- Giant refactorings.
|
||||
- Non-trivial changes with no tests.
|
||||
- Stylistic code changes that do not alter any app logic. Reducing allocations, removing `.unwrap()`s, fixing typos is great; making code "more readable" — maybe not so much.
|
||||
- Features where (in our subjective opinion) the extra complexity isn't worth it for the number of people who will benefit.
|
||||
- Anything that seems completely AI generated.
|
||||
|
||||
|
||||
1307
Cargo.lock
generated
1307
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
32
Cargo.toml
32
Cargo.toml
@@ -35,6 +35,7 @@ members = [
|
||||
"crates/cloud_api_client",
|
||||
"crates/cloud_api_types",
|
||||
"crates/cloud_llm_client",
|
||||
"crates/cloud_zeta2_prompt",
|
||||
"crates/collab",
|
||||
"crates/collab_ui",
|
||||
"crates/collections",
|
||||
@@ -89,9 +90,8 @@ members = [
|
||||
"crates/image_viewer",
|
||||
"crates/inspector_ui",
|
||||
"crates/install_cli",
|
||||
"crates/jj",
|
||||
"crates/jj_ui",
|
||||
"crates/journal",
|
||||
"crates/json_schema_store",
|
||||
"crates/keymap_editor",
|
||||
"crates/language",
|
||||
"crates/language_extension",
|
||||
@@ -150,8 +150,9 @@ members = [
|
||||
"crates/semantic_version",
|
||||
"crates/session",
|
||||
"crates/settings",
|
||||
"crates/settings_macros",
|
||||
"crates/settings_profile_selector",
|
||||
"crates/settings_ui_macros",
|
||||
"crates/settings_ui",
|
||||
"crates/snippet",
|
||||
"crates/snippet_provider",
|
||||
"crates/snippets_ui",
|
||||
@@ -220,6 +221,7 @@ members = [
|
||||
# Tooling
|
||||
#
|
||||
|
||||
"tooling/perf",
|
||||
"tooling/workspace-hack",
|
||||
"tooling/xtask",
|
||||
]
|
||||
@@ -270,6 +272,7 @@ clock = { path = "crates/clock" }
|
||||
cloud_api_client = { path = "crates/cloud_api_client" }
|
||||
cloud_api_types = { path = "crates/cloud_api_types" }
|
||||
cloud_llm_client = { path = "crates/cloud_llm_client" }
|
||||
cloud_zeta2_prompt = { path = "crates/cloud_zeta2_prompt" }
|
||||
collab = { path = "crates/collab" }
|
||||
collab_ui = { path = "crates/collab_ui" }
|
||||
collections = { path = "crates/collections" }
|
||||
@@ -319,9 +322,8 @@ edit_prediction_context = { path = "crates/edit_prediction_context" }
|
||||
zeta2_tools = { path = "crates/zeta2_tools" }
|
||||
inspector_ui = { path = "crates/inspector_ui" }
|
||||
install_cli = { path = "crates/install_cli" }
|
||||
jj = { path = "crates/jj" }
|
||||
jj_ui = { path = "crates/jj_ui" }
|
||||
journal = { path = "crates/journal" }
|
||||
json_schema_store = { path = "crates/json_schema_store" }
|
||||
keymap_editor = { path = "crates/keymap_editor" }
|
||||
language = { path = "crates/language" }
|
||||
language_extension = { path = "crates/language_extension" }
|
||||
@@ -356,6 +358,7 @@ outline = { path = "crates/outline" }
|
||||
outline_panel = { path = "crates/outline_panel" }
|
||||
panel = { path = "crates/panel" }
|
||||
paths = { path = "crates/paths" }
|
||||
perf = { path = "tooling/perf" }
|
||||
picker = { path = "crates/picker" }
|
||||
plugin = { path = "crates/plugin" }
|
||||
plugin_macros = { path = "crates/plugin_macros" }
|
||||
@@ -375,7 +378,7 @@ remote_server = { path = "crates/remote_server" }
|
||||
repl = { path = "crates/repl" }
|
||||
reqwest_client = { path = "crates/reqwest_client" }
|
||||
rich_text = { path = "crates/rich_text" }
|
||||
rodio = { git = "https://github.com/RustAudio/rodio", branch = "better_wav_output"}
|
||||
rodio = { git = "https://github.com/RustAudio/rodio" }
|
||||
rope = { path = "crates/rope" }
|
||||
rpc = { path = "crates/rpc" }
|
||||
rules_library = { path = "crates/rules_library" }
|
||||
@@ -383,8 +386,8 @@ search = { path = "crates/search" }
|
||||
semantic_version = { path = "crates/semantic_version" }
|
||||
session = { path = "crates/session" }
|
||||
settings = { path = "crates/settings" }
|
||||
settings_macros = { path = "crates/settings_macros" }
|
||||
settings_ui = { path = "crates/settings_ui" }
|
||||
settings_ui_macros = { path = "crates/settings_ui_macros" }
|
||||
snippet = { path = "crates/snippet" }
|
||||
snippet_provider = { path = "crates/snippet_provider" }
|
||||
snippets_ui = { path = "crates/snippets_ui" }
|
||||
@@ -440,9 +443,9 @@ zlog_settings = { path = "crates/zlog_settings" }
|
||||
# External crates
|
||||
#
|
||||
|
||||
agent-client-protocol = { version = "0.4.0", features = ["unstable"] }
|
||||
agent-client-protocol = { version = "0.4.3", features = ["unstable"] }
|
||||
aho-corasick = "1.1"
|
||||
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty.git", branch = "add-hush-login-flag" }
|
||||
alacritty_terminal = "0.25.1-rc1"
|
||||
any_vec = "0.14"
|
||||
anyhow = "1.0.86"
|
||||
arrayvec = { version = "0.7.4", features = ["serde"] }
|
||||
@@ -511,6 +514,7 @@ futures-lite = "1.13"
|
||||
git2 = { version = "0.20.1", default-features = false }
|
||||
globset = "0.4"
|
||||
handlebars = "4.3"
|
||||
hashbrown = "0.15.3"
|
||||
heck = "0.5"
|
||||
heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
|
||||
hex = "0.4.3"
|
||||
@@ -526,7 +530,6 @@ indexmap = { version = "2.7.0", features = ["serde"] }
|
||||
indoc = "2"
|
||||
inventory = "0.3.19"
|
||||
itertools = "0.14.0"
|
||||
jj-lib = { git = "https://github.com/jj-vcs/jj", rev = "e18eb8e05efaa153fad5ef46576af145bba1807f" }
|
||||
json_dotpath = "1.1"
|
||||
jsonschema = "0.30.0"
|
||||
jsonwebtoken = "9.3"
|
||||
@@ -666,7 +669,7 @@ tokio = { version = "1" }
|
||||
tokio-tungstenite = { version = "0.26", features = ["__rustls-tls"] }
|
||||
toml = "0.8"
|
||||
tower-http = "0.4.4"
|
||||
tree-sitter = { version = "0.25.6", features = ["wasm"] }
|
||||
tree-sitter = { version = "0.25.10", features = ["wasm"] }
|
||||
tree-sitter-bash = "0.25.0"
|
||||
tree-sitter-c = "0.23"
|
||||
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "5cb9b693cfd7bfacab1d9ff4acac1a4150700609" }
|
||||
@@ -683,7 +686,7 @@ tree-sitter-html = "0.23"
|
||||
tree-sitter-jsdoc = "0.23"
|
||||
tree-sitter-json = "0.24"
|
||||
tree-sitter-md = { git = "https://github.com/tree-sitter-grammars/tree-sitter-markdown", rev = "9a23c1a96c0513d8fc6520972beedd419a973539" }
|
||||
tree-sitter-python = { git = "https://github.com/zed-industries/tree-sitter-python", rev = "218fcbf3fda3d029225f3dec005cb497d111b35e" }
|
||||
tree-sitter-python = "0.25"
|
||||
tree-sitter-regex = "0.24"
|
||||
tree-sitter-ruby = "0.23"
|
||||
tree-sitter-rust = "0.24"
|
||||
@@ -711,9 +714,11 @@ wasmtime = { version = "29", default-features = false, features = [
|
||||
wasmtime-wasi = "29"
|
||||
which = "6.0.0"
|
||||
windows-core = "0.61"
|
||||
windows-sys = "0.61"
|
||||
wit-component = "0.221"
|
||||
workspace-hack = "0.1.0"
|
||||
yawc = "0.2.5"
|
||||
zeroize = "1.8"
|
||||
zstd = "0.11"
|
||||
|
||||
[workspace.dependencies.windows]
|
||||
@@ -740,6 +745,7 @@ features = [
|
||||
"Win32_Networking_WinSock",
|
||||
"Win32_Security",
|
||||
"Win32_Security_Credentials",
|
||||
"Win32_Security_Cryptography",
|
||||
"Win32_Storage_FileSystem",
|
||||
"Win32_System_Com",
|
||||
"Win32_System_Com_StructuredStorage",
|
||||
@@ -808,6 +814,7 @@ image_viewer = { codegen-units = 1 }
|
||||
edit_prediction_button = { codegen-units = 1 }
|
||||
install_cli = { codegen-units = 1 }
|
||||
journal = { codegen-units = 1 }
|
||||
json_schema_store = { codegen-units = 1 }
|
||||
lmstudio = { codegen-units = 1 }
|
||||
menu = { codegen-units = 1 }
|
||||
notifications = { codegen-units = 1 }
|
||||
@@ -859,6 +866,7 @@ todo = "deny"
|
||||
declare_interior_mutable_const = "deny"
|
||||
|
||||
redundant_clone = "deny"
|
||||
disallowed_methods = "deny"
|
||||
|
||||
# We currently do not restrict any style rules
|
||||
# as it slows down shipping code to Zed.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# syntax = docker/dockerfile:1.2
|
||||
|
||||
FROM rust:1.89-bookworm as builder
|
||||
FROM rust:1.90-bookworm as builder
|
||||
WORKDIR app
|
||||
COPY . .
|
||||
|
||||
|
||||
@@ -550,6 +550,8 @@
|
||||
"cmd-ctrl-left": "editor::SelectSmallerSyntaxNode", // Shrink selection
|
||||
"cmd-ctrl-right": "editor::SelectLargerSyntaxNode", // Expand selection
|
||||
"cmd-ctrl-up": "editor::SelectPreviousSyntaxNode", // Move selection up
|
||||
"ctrl-shift-right": "editor::SelectLargerSyntaxNode", // Expand selection (VSCode version)
|
||||
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink selection (VSCode version)
|
||||
"cmd-ctrl-down": "editor::SelectNextSyntaxNode", // Move selection down
|
||||
"cmd-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand
|
||||
"cmd-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
|
||||
|
||||
@@ -17,7 +17,6 @@
|
||||
"up": "menu::SelectPrevious",
|
||||
"enter": "menu::Confirm",
|
||||
"ctrl-enter": "menu::SecondaryConfirm",
|
||||
"ctrl-escape": "menu::Cancel",
|
||||
"ctrl-c": "menu::Cancel",
|
||||
"escape": "menu::Cancel",
|
||||
"shift-alt-enter": "menu::Restart",
|
||||
@@ -346,7 +345,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "AcpThread > Editor",
|
||||
"context": "AcpThread > Editor && !use_modifier_to_send",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"enter": "agent::Chat",
|
||||
@@ -356,6 +355,17 @@
|
||||
"shift-tab": "agent::CycleModeSelector"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "AcpThread > Editor && use_modifier_to_send",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-enter": "agent::Chat",
|
||||
"ctrl-shift-r": "agent::OpenAgentDiff",
|
||||
"ctrl-shift-y": "agent::KeepAll",
|
||||
"ctrl-shift-n": "agent::RejectAll",
|
||||
"shift-tab": "agent::CycleModeSelector"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ThreadHistory",
|
||||
"use_key_equivalents": true,
|
||||
@@ -465,8 +475,8 @@
|
||||
"ctrl-k ctrl-w": "workspace::CloseAllItemsAndPanes",
|
||||
"back": "pane::GoBack",
|
||||
"alt--": "pane::GoBack",
|
||||
"alt-=": "pane::GoForward",
|
||||
"forward": "pane::GoForward",
|
||||
"alt-=": "pane::GoForward",
|
||||
"f3": "search::SelectNextMatch",
|
||||
"shift-f3": "search::SelectPreviousMatch",
|
||||
"ctrl-shift-f": "project_search::ToggleFocus",
|
||||
@@ -497,8 +507,6 @@
|
||||
"shift-alt-down": "editor::DuplicateLineDown",
|
||||
"shift-alt-right": "editor::SelectLargerSyntaxNode", // Expand selection
|
||||
"shift-alt-left": "editor::SelectSmallerSyntaxNode", // Shrink selection
|
||||
"ctrl-shift-right": "editor::SelectLargerSyntaxNode", // Expand selection (VSCode version)
|
||||
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink selection (VSCode version)
|
||||
"ctrl-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
|
||||
"ctrl-f2": "editor::SelectAllMatches", // Select all occurrences of current word
|
||||
"ctrl-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand
|
||||
@@ -610,8 +618,6 @@
|
||||
"shift-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
|
||||
"shift-alt-=": ["workspace::IncreaseActiveDockSize", { "px": 0 }],
|
||||
"shift-alt-0": "workspace::ResetOpenDocksSize",
|
||||
"ctrl-shift-alt--": ["workspace::DecreaseOpenDocksSize", { "px": 0 }],
|
||||
"ctrl-shift-alt-=": ["workspace::IncreaseOpenDocksSize", { "px": 0 }],
|
||||
"ctrl-shift-f": "pane::DeploySearch",
|
||||
"ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
|
||||
"ctrl-shift-t": "pane::ReopenClosedItem",
|
||||
@@ -1115,6 +1121,7 @@
|
||||
"alt-f": ["terminal::SendText", "\u001bf"],
|
||||
"alt-.": ["terminal::SendText", "\u001b."],
|
||||
"ctrl-delete": ["terminal::SendText", "\u001bd"],
|
||||
"ctrl-n": "workspace::NewTerminal",
|
||||
// Overrides for conflicting keybindings
|
||||
"ctrl-b": ["terminal::SendKeystroke", "ctrl-b"],
|
||||
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
|
||||
@@ -1251,8 +1258,8 @@
|
||||
"ctrl-1": "onboarding::ActivateBasicsPage",
|
||||
"ctrl-2": "onboarding::ActivateEditingPage",
|
||||
"ctrl-3": "onboarding::ActivateAISetupPage",
|
||||
"ctrl-escape": "onboarding::Finish",
|
||||
"alt-tab": "onboarding::SignIn",
|
||||
"ctrl-enter": "onboarding::Finish",
|
||||
"alt-shift-l": "onboarding::SignIn",
|
||||
"shift-alt-a": "onboarding::OpenAccount"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
// from the command palette.
|
||||
[
|
||||
{
|
||||
"context": "!GitPanel",
|
||||
"bindings": {
|
||||
"ctrl-g": "menu::Cancel"
|
||||
}
|
||||
|
||||
@@ -95,8 +95,8 @@
|
||||
"g g": "vim::StartOfDocument",
|
||||
"g h": "editor::Hover",
|
||||
"g B": "editor::BlameHover",
|
||||
"g t": "pane::ActivateNextItem",
|
||||
"g shift-t": "pane::ActivatePreviousItem",
|
||||
"g t": "vim::GoToTab",
|
||||
"g shift-t": "vim::GoToPreviousTab",
|
||||
"g d": "editor::GoToDefinition",
|
||||
"g shift-d": "editor::GoToDeclaration",
|
||||
"g y": "editor::GoToTypeDefinition",
|
||||
@@ -240,6 +240,7 @@
|
||||
"delete": "vim::DeleteRight",
|
||||
"g shift-j": "vim::JoinLinesNoWhitespace",
|
||||
"y": "vim::PushYank",
|
||||
"shift-y": "vim::YankToEndOfLine",
|
||||
"x": "vim::DeleteRight",
|
||||
"shift-x": "vim::DeleteLeft",
|
||||
"ctrl-a": "vim::Increment",
|
||||
@@ -392,7 +393,7 @@
|
||||
"escape": "editor::Cancel",
|
||||
"shift-d": "vim::DeleteToEndOfLine",
|
||||
"shift-j": "vim::JoinLines",
|
||||
"shift-y": "vim::YankLine",
|
||||
"shift-y": "vim::YankToEndOfLine",
|
||||
"shift-i": "vim::InsertFirstNonWhitespace",
|
||||
"shift-a": "vim::InsertEndOfLine",
|
||||
"o": "vim::InsertLineBelow",
|
||||
@@ -426,6 +427,7 @@
|
||||
";": "vim::HelixCollapseSelection",
|
||||
":": "command_palette::Toggle",
|
||||
"m": "vim::PushHelixMatch",
|
||||
"s": "vim::HelixSelectRegex",
|
||||
"]": ["vim::PushHelixNext", { "around": true }],
|
||||
"[": ["vim::PushHelixPrevious", { "around": true }],
|
||||
"left": "vim::WrappingLeft",
|
||||
@@ -433,6 +435,8 @@
|
||||
"h": "vim::WrappingLeft",
|
||||
"l": "vim::WrappingRight",
|
||||
"y": "vim::HelixYank",
|
||||
"p": "vim::HelixPaste",
|
||||
"shift-p": ["vim::HelixPaste", { "before": true }],
|
||||
"alt-;": "vim::OtherEnd",
|
||||
"ctrl-r": "vim::Redo",
|
||||
"f": ["vim::PushFindForward", { "before": false, "multiline": true }],
|
||||
|
||||
@@ -115,6 +115,7 @@
|
||||
// Whether to enable vim modes and key bindings.
|
||||
"vim_mode": false,
|
||||
// Whether to enable helix mode and key bindings.
|
||||
// Enabling this mode will automatically enable vim mode.
|
||||
"helix_mode": false,
|
||||
// Whether to show the informational hover box when moving the mouse
|
||||
// over symbols in the editor.
|
||||
@@ -391,8 +392,6 @@
|
||||
"use_system_window_tabs": false,
|
||||
// Titlebar related settings
|
||||
"title_bar": {
|
||||
// When to show the title bar: "always" | "never" | "hide_in_full_screen".
|
||||
"show": "always",
|
||||
// Whether to show the branch icon beside branch switcher in the titlebar.
|
||||
"show_branch_icon": false,
|
||||
// Whether to show the branch name button in the titlebar.
|
||||
@@ -413,15 +412,33 @@
|
||||
"experimental.rodio_audio": false,
|
||||
// Requires 'rodio_audio: true'
|
||||
//
|
||||
// Use the new audio systems automatic gain control for your microphone.
|
||||
// This affects how loud you sound to others.
|
||||
"experimental.control_input_volume": false,
|
||||
// Automatically increase or decrease you microphone's volume. This affects how
|
||||
// loud you sound to others.
|
||||
//
|
||||
// Recommended: off (default)
|
||||
// Microphones are too quite in zed, until everyone is on experimental
|
||||
// audio and has auto speaker volume on this will make you very loud
|
||||
// compared to other speakers.
|
||||
"experimental.auto_microphone_volume": false,
|
||||
// Requires 'rodio_audio: true'
|
||||
//
|
||||
// Use the new audio systems automatic gain control on everyone in the
|
||||
// call. This makes call members who are too quite louder and those who are
|
||||
// too loud quieter. This only affects how things sound for you.
|
||||
"experimental.control_output_volume": false
|
||||
// Automatically increate or decrease the volume of other call members.
|
||||
// This only affects how things sound for you.
|
||||
"experimental.auto_speaker_volume": true,
|
||||
// Requires 'rodio_audio: true'
|
||||
//
|
||||
// Remove background noises. Works great for typing, cars, dogs, AC. Does
|
||||
// not work well on music.
|
||||
"experimental.denoise": true,
|
||||
// Requires 'rodio_audio: true'
|
||||
//
|
||||
// Use audio parameters compatible with the previous versions of
|
||||
// experimental audio and non-experimental audio. When this is false you
|
||||
// will sound strange to anyone not on the latest experimental audio. In
|
||||
// the future we will migrate by setting this to false
|
||||
//
|
||||
// You need to rejoin a call for this setting to apply
|
||||
"experimental.legacy_audio_compatible": true
|
||||
},
|
||||
// Scrollbar related settings
|
||||
"scrollbar": {
|
||||
@@ -1414,7 +1431,7 @@
|
||||
// "line_height": {
|
||||
// "custom": 2
|
||||
// },
|
||||
"line_height": "comfortable",
|
||||
"line_height": "standard",
|
||||
// Activate the python virtual environment, if one is found, in the
|
||||
// terminal's working directory (as resolved by the working_directory
|
||||
// setting). Set this to "off" to disable this behavior.
|
||||
@@ -1434,7 +1451,7 @@
|
||||
//
|
||||
// The shell running in the terminal needs to be configured to emit the title.
|
||||
// Example: `echo -e "\e]2;New Title\007";`
|
||||
"breadcrumbs": true
|
||||
"breadcrumbs": false
|
||||
},
|
||||
// Scrollbar-related settings
|
||||
"scrollbar": {
|
||||
@@ -1514,7 +1531,7 @@
|
||||
// }
|
||||
//
|
||||
"file_types": {
|
||||
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "**/.vscode/**/*.json"],
|
||||
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "**/.vscode/**/*.json", "tsconfig*.json"],
|
||||
"Shell Script": [".env.*"]
|
||||
},
|
||||
// Settings for which version of Node.js and NPM to use when installing
|
||||
|
||||
@@ -43,7 +43,11 @@
|
||||
// "args": ["--login"]
|
||||
// }
|
||||
// }
|
||||
"shell": "system"
|
||||
"shell": "system",
|
||||
// Whether to show the task line in the output of the spawned task, defaults to `true`.
|
||||
"show_summary": true,
|
||||
// Whether to show the command line in the output of the spawned task, defaults to `true`.
|
||||
"show_command": true
|
||||
// Represents the tags for inline runnable indicators, or spawning multiple tasks at once.
|
||||
// "tags": []
|
||||
}
|
||||
|
||||
BIN
assets/sounds/guest_joined_call.wav
Normal file
BIN
assets/sounds/guest_joined_call.wav
Normal file
Binary file not shown.
@@ -239,7 +239,7 @@
|
||||
"hint": {
|
||||
"color": "#628b80ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#ff8f3fff",
|
||||
|
||||
@@ -248,7 +248,7 @@
|
||||
"hint": {
|
||||
"color": "#8c957dff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#fb4833ff",
|
||||
|
||||
@@ -244,7 +244,7 @@
|
||||
"hint": {
|
||||
"color": "#788ca6ff",
|
||||
"font_style": null,
|
||||
"font_weight": 700
|
||||
"font_weight": null
|
||||
},
|
||||
"keyword": {
|
||||
"color": "#b477cfff",
|
||||
|
||||
11
clippy.toml
11
clippy.toml
@@ -5,3 +5,14 @@ ignore-interior-mutability = [
|
||||
# and Hash impls do not use fields with interior mutability.
|
||||
"agent::context::AgentContextKey"
|
||||
]
|
||||
disallowed-methods = [
|
||||
{ path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" },
|
||||
{ path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" },
|
||||
{ path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" },
|
||||
]
|
||||
disallowed-types = [
|
||||
# { path = "std::collections::HashMap", replacement = "collections::HashMap" },
|
||||
# { path = "std::collections::HashSet", replacement = "collections::HashSet" },
|
||||
# { path = "indexmap::IndexSet", replacement = "collections::IndexSet" },
|
||||
# { path = "indexmap::IndexMap", replacement = "collections::IndexMap" },
|
||||
]
|
||||
|
||||
10
compose.yml
10
compose.yml
@@ -1,6 +1,6 @@
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:15
|
||||
image: docker.io/library/postgres:15
|
||||
container_name: zed_postgres
|
||||
ports:
|
||||
- 5432:5432
|
||||
@@ -23,7 +23,7 @@ services:
|
||||
- ./.blob_store:/data
|
||||
|
||||
livekit_server:
|
||||
image: livekit/livekit-server
|
||||
image: docker.io/livekit/livekit-server
|
||||
container_name: livekit_server
|
||||
entrypoint: /livekit-server --config /livekit.yaml
|
||||
ports:
|
||||
@@ -34,7 +34,7 @@ services:
|
||||
- ./livekit.yaml:/livekit.yaml
|
||||
|
||||
postgrest_app:
|
||||
image: postgrest/postgrest
|
||||
image: docker.io/postgrest/postgrest
|
||||
container_name: postgrest_app
|
||||
ports:
|
||||
- 8081:8081
|
||||
@@ -47,7 +47,7 @@ services:
|
||||
- postgres
|
||||
|
||||
postgrest_llm:
|
||||
image: postgrest/postgrest
|
||||
image: docker.io/postgrest/postgrest
|
||||
container_name: postgrest_llm
|
||||
ports:
|
||||
- 8082:8082
|
||||
@@ -60,7 +60,7 @@ services:
|
||||
- postgres
|
||||
|
||||
stripe-mock:
|
||||
image: stripe/stripe-mock:v0.178.0
|
||||
image: docker.io/stripe/stripe-mock:v0.178.0
|
||||
ports:
|
||||
- 12111:12111
|
||||
- 12112:12112
|
||||
|
||||
@@ -573,7 +573,7 @@ impl ToolCallContent {
|
||||
))),
|
||||
acp::ToolCallContent::Diff { diff } => Ok(Self::Diff(cx.new(|cx| {
|
||||
Diff::finalized(
|
||||
diff.path,
|
||||
diff.path.to_string_lossy().into_owned(),
|
||||
diff.old_text,
|
||||
diff.new_text,
|
||||
language_registry,
|
||||
@@ -1780,20 +1780,26 @@ impl AcpThread {
|
||||
limit: Option<u32>,
|
||||
reuse_shared_snapshot: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<String>> {
|
||||
) -> Task<Result<String, acp::Error>> {
|
||||
// Args are 1-based, move to 0-based
|
||||
let line = line.unwrap_or_default().saturating_sub(1);
|
||||
let limit = limit.unwrap_or(u32::MAX);
|
||||
let project = self.project.clone();
|
||||
let action_log = self.action_log.clone();
|
||||
cx.spawn(async move |this, cx| {
|
||||
let load = project.update(cx, |project, cx| {
|
||||
let path = project
|
||||
.project_path_for_absolute_path(&path, cx)
|
||||
.context("invalid path")?;
|
||||
anyhow::Ok(project.open_buffer(path, cx))
|
||||
});
|
||||
let buffer = load??.await?;
|
||||
let load = project
|
||||
.update(cx, |project, cx| {
|
||||
let path = project
|
||||
.project_path_for_absolute_path(&path, cx)
|
||||
.ok_or_else(|| {
|
||||
acp::Error::resource_not_found(Some(path.display().to_string()))
|
||||
})?;
|
||||
Ok(project.open_buffer(path, cx))
|
||||
})
|
||||
.map_err(|e| acp::Error::internal_error().with_data(e.to_string()))
|
||||
.flatten()?;
|
||||
|
||||
let buffer = load.await?;
|
||||
|
||||
let snapshot = if reuse_shared_snapshot {
|
||||
this.read_with(cx, |this, _| {
|
||||
@@ -1820,15 +1826,17 @@ impl AcpThread {
|
||||
};
|
||||
|
||||
let max_point = snapshot.max_point();
|
||||
if line >= max_point.row {
|
||||
anyhow::bail!(
|
||||
let start_position = Point::new(line, 0);
|
||||
|
||||
if start_position > max_point {
|
||||
return Err(acp::Error::invalid_params().with_data(format!(
|
||||
"Attempting to read beyond the end of the file, line {}:{}",
|
||||
max_point.row + 1,
|
||||
max_point.column
|
||||
);
|
||||
)));
|
||||
}
|
||||
|
||||
let start = snapshot.anchor_before(Point::new(line, 0));
|
||||
let start = snapshot.anchor_before(start_position);
|
||||
let end = snapshot.anchor_before(Point::new(line.saturating_add(limit), 0));
|
||||
|
||||
project.update(cx, |project, cx| {
|
||||
@@ -1977,7 +1985,7 @@ impl AcpThread {
|
||||
let terminal_id = terminal_id.clone();
|
||||
async move |_this, cx| {
|
||||
let env = env.await;
|
||||
let (command, args) = ShellBuilder::new(
|
||||
let (task_command, task_args) = ShellBuilder::new(
|
||||
project
|
||||
.update(cx, |project, cx| {
|
||||
project
|
||||
@@ -1988,13 +1996,13 @@ impl AcpThread {
|
||||
&Shell::Program(get_default_system_shell()),
|
||||
)
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(command), &args);
|
||||
.build(Some(command.clone()), &args);
|
||||
let terminal = project
|
||||
.update(cx, |project, cx| {
|
||||
project.create_terminal_task(
|
||||
task::SpawnInTerminal {
|
||||
command: Some(command.clone()),
|
||||
args: args.clone(),
|
||||
command: Some(task_command),
|
||||
args: task_args,
|
||||
cwd: cwd.clone(),
|
||||
env,
|
||||
..Default::default()
|
||||
@@ -2449,6 +2457,81 @@ mod tests {
|
||||
|
||||
assert_eq!(content, "two\nthree\n");
|
||||
|
||||
// Invalid
|
||||
let err = thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.read_text_file(path!("/tmp/foo").into(), Some(6), Some(2), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap_err();
|
||||
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"Invalid params: \"Attempting to read beyond the end of the file, line 5:0\""
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_reading_empty_file(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(path!("/tmp"), json!({"foo": ""})).await;
|
||||
let project = Project::test(fs.clone(), [], cx).await;
|
||||
project
|
||||
.update(cx, |project, cx| {
|
||||
project.find_or_create_worktree(path!("/tmp/foo"), true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let connection = Rc::new(FakeAgentConnection::new());
|
||||
|
||||
let thread = cx
|
||||
.update(|cx| connection.new_thread(project, Path::new(path!("/tmp")), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Whole file
|
||||
let content = thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.read_text_file(path!("/tmp/foo").into(), None, None, false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(content, "");
|
||||
|
||||
// Only start line
|
||||
let content = thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.read_text_file(path!("/tmp/foo").into(), Some(1), None, false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(content, "");
|
||||
|
||||
// Only limit
|
||||
let content = thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.read_text_file(path!("/tmp/foo").into(), None, Some(2), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(content, "");
|
||||
|
||||
// Range
|
||||
let content = thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.read_text_file(path!("/tmp/foo").into(), Some(1), Some(1), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(content, "");
|
||||
|
||||
// Invalid
|
||||
let err = thread
|
||||
.update(cx, |thread, cx| {
|
||||
@@ -2459,9 +2542,40 @@ mod tests {
|
||||
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"Attempting to read beyond the end of the file, line 5:0"
|
||||
"Invalid params: \"Attempting to read beyond the end of the file, line 1:0\""
|
||||
);
|
||||
}
|
||||
#[gpui::test]
|
||||
async fn test_reading_non_existing_file(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(path!("/tmp"), json!({})).await;
|
||||
let project = Project::test(fs.clone(), [], cx).await;
|
||||
project
|
||||
.update(cx, |project, cx| {
|
||||
project.find_or_create_worktree(path!("/tmp"), true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let connection = Rc::new(FakeAgentConnection::new());
|
||||
|
||||
let thread = cx
|
||||
.update(|cx| connection.new_thread(project, Path::new(path!("/tmp")), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Out of project file
|
||||
let err = thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.read_text_file(path!("/foo").into(), None, None, false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap_err();
|
||||
|
||||
assert_eq!(err.code, acp::ErrorCode::RESOURCE_NOT_FOUND.code);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_succeeding_canceled_toolcall(cx: &mut TestAppContext) {
|
||||
|
||||
@@ -68,7 +68,7 @@ pub trait AgentConnection {
|
||||
///
|
||||
/// If the agent does not support model selection, returns [None].
|
||||
/// This allows sharing the selector in UI components.
|
||||
fn model_selector(&self) -> Option<Rc<dyn AgentModelSelector>> {
|
||||
fn model_selector(&self, _session_id: &acp::SessionId) -> Option<Rc<dyn AgentModelSelector>> {
|
||||
None
|
||||
}
|
||||
|
||||
@@ -177,61 +177,48 @@ pub trait AgentModelSelector: 'static {
|
||||
/// If the session doesn't exist or the model is invalid, it returns an error.
|
||||
///
|
||||
/// # Parameters
|
||||
/// - `session_id`: The ID of the session (thread) to apply the model to.
|
||||
/// - `model`: The model to select (should be one from [list_models]).
|
||||
/// - `cx`: The GPUI app context.
|
||||
///
|
||||
/// # Returns
|
||||
/// A task resolving to `Ok(())` on success or an error.
|
||||
fn select_model(
|
||||
&self,
|
||||
session_id: acp::SessionId,
|
||||
model_id: AgentModelId,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<()>>;
|
||||
fn select_model(&self, model_id: acp::ModelId, cx: &mut App) -> Task<Result<()>>;
|
||||
|
||||
/// Retrieves the currently selected model for a specific session (thread).
|
||||
///
|
||||
/// # Parameters
|
||||
/// - `session_id`: The ID of the session (thread) to query.
|
||||
/// - `cx`: The GPUI app context.
|
||||
///
|
||||
/// # Returns
|
||||
/// A task resolving to the selected model (always set) or an error (e.g., session not found).
|
||||
fn selected_model(
|
||||
&self,
|
||||
session_id: &acp::SessionId,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<AgentModelInfo>>;
|
||||
fn selected_model(&self, cx: &mut App) -> Task<Result<AgentModelInfo>>;
|
||||
|
||||
/// Whenever the model list is updated the receiver will be notified.
|
||||
fn watch(&self, cx: &mut App) -> watch::Receiver<()>;
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct AgentModelId(pub SharedString);
|
||||
|
||||
impl std::ops::Deref for AgentModelId {
|
||||
type Target = SharedString;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for AgentModelId {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.0.fmt(f)
|
||||
/// Optional for agents that don't update their model list.
|
||||
fn watch(&self, _cx: &mut App) -> Option<watch::Receiver<()>> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct AgentModelInfo {
|
||||
pub id: AgentModelId,
|
||||
pub id: acp::ModelId,
|
||||
pub name: SharedString,
|
||||
pub description: Option<SharedString>,
|
||||
pub icon: Option<IconName>,
|
||||
}
|
||||
|
||||
impl From<acp::ModelInfo> for AgentModelInfo {
|
||||
fn from(info: acp::ModelInfo) -> Self {
|
||||
Self {
|
||||
id: info.model_id,
|
||||
name: info.name.into(),
|
||||
description: info.description.map(|desc| desc.into()),
|
||||
icon: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct AgentModelGroupName(pub SharedString);
|
||||
|
||||
|
||||
@@ -6,12 +6,7 @@ use itertools::Itertools;
|
||||
use language::{
|
||||
Anchor, Buffer, Capability, LanguageRegistry, OffsetRangeExt as _, Point, Rope, TextBuffer,
|
||||
};
|
||||
use std::{
|
||||
cmp::Reverse,
|
||||
ops::Range,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc};
|
||||
use util::ResultExt;
|
||||
|
||||
pub enum Diff {
|
||||
@@ -21,7 +16,7 @@ pub enum Diff {
|
||||
|
||||
impl Diff {
|
||||
pub fn finalized(
|
||||
path: PathBuf,
|
||||
path: String,
|
||||
old_text: Option<String>,
|
||||
new_text: String,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
@@ -36,7 +31,7 @@ impl Diff {
|
||||
let buffer = new_buffer.clone();
|
||||
async move |_, cx| {
|
||||
let language = language_registry
|
||||
.language_for_file_path(&path)
|
||||
.language_for_file_path(Path::new(&path))
|
||||
.await
|
||||
.log_err();
|
||||
|
||||
@@ -152,12 +147,15 @@ impl Diff {
|
||||
let path = match self {
|
||||
Diff::Pending(PendingDiff {
|
||||
new_buffer: buffer, ..
|
||||
}) => buffer.read(cx).file().map(|file| file.path().as_ref()),
|
||||
Diff::Finalized(FinalizedDiff { path, .. }) => Some(path.as_path()),
|
||||
}) => buffer
|
||||
.read(cx)
|
||||
.file()
|
||||
.map(|file| file.path().display(file.path_style(cx))),
|
||||
Diff::Finalized(FinalizedDiff { path, .. }) => Some(path.as_str().into()),
|
||||
};
|
||||
format!(
|
||||
"Diff: {}\n```\n{}\n```\n",
|
||||
path.unwrap_or(Path::new("untitled")).display(),
|
||||
path.unwrap_or("untitled".into()),
|
||||
buffer_text
|
||||
)
|
||||
}
|
||||
@@ -244,8 +242,8 @@ impl PendingDiff {
|
||||
.new_buffer
|
||||
.read(cx)
|
||||
.file()
|
||||
.map(|file| file.path().as_ref())
|
||||
.unwrap_or(Path::new("untitled"))
|
||||
.map(|file| file.path().display(file.path_style(cx)))
|
||||
.unwrap_or("untitled".into())
|
||||
.into();
|
||||
|
||||
// Replace the buffer in the multibuffer with the snapshot
|
||||
@@ -348,7 +346,7 @@ impl PendingDiff {
|
||||
}
|
||||
|
||||
pub struct FinalizedDiff {
|
||||
path: PathBuf,
|
||||
path: String,
|
||||
base_text: Arc<String>,
|
||||
new_buffer: Entity<Buffer>,
|
||||
multibuffer: Entity<MultiBuffer>,
|
||||
|
||||
@@ -126,6 +126,39 @@ impl MentionUri {
|
||||
abs_path: None,
|
||||
line_range,
|
||||
})
|
||||
} else if let Some(name) = path.strip_prefix("/agent/symbol/") {
|
||||
let fragment = url
|
||||
.fragment()
|
||||
.context("Missing fragment for untitled buffer selection")?;
|
||||
let line_range = parse_line_range(fragment)?;
|
||||
let path =
|
||||
single_query_param(&url, "path")?.context("Missing path for symbol")?;
|
||||
Ok(Self::Symbol {
|
||||
name: name.to_string(),
|
||||
abs_path: path.into(),
|
||||
line_range,
|
||||
})
|
||||
} else if path.starts_with("/agent/file") {
|
||||
let path =
|
||||
single_query_param(&url, "path")?.context("Missing path for file")?;
|
||||
Ok(Self::File {
|
||||
abs_path: path.into(),
|
||||
})
|
||||
} else if path.starts_with("/agent/directory") {
|
||||
let path =
|
||||
single_query_param(&url, "path")?.context("Missing path for directory")?;
|
||||
Ok(Self::Directory {
|
||||
abs_path: path.into(),
|
||||
})
|
||||
} else if path.starts_with("/agent/selection") {
|
||||
let fragment = url.fragment().context("Missing fragment for selection")?;
|
||||
let line_range = parse_line_range(fragment)?;
|
||||
let path =
|
||||
single_query_param(&url, "path")?.context("Missing path for selection")?;
|
||||
Ok(Self::Selection {
|
||||
abs_path: Some(path.into()),
|
||||
line_range,
|
||||
})
|
||||
} else {
|
||||
bail!("invalid zed url: {:?}", input);
|
||||
}
|
||||
@@ -180,20 +213,29 @@ impl MentionUri {
|
||||
pub fn to_uri(&self) -> Url {
|
||||
match self {
|
||||
MentionUri::File { abs_path } => {
|
||||
Url::from_file_path(abs_path).expect("mention path should be absolute")
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
url.set_path("/agent/file");
|
||||
url.query_pairs_mut()
|
||||
.append_pair("path", &abs_path.to_string_lossy());
|
||||
url
|
||||
}
|
||||
MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(),
|
||||
MentionUri::Directory { abs_path } => {
|
||||
Url::from_directory_path(abs_path).expect("mention path should be absolute")
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
url.set_path("/agent/directory");
|
||||
url.query_pairs_mut()
|
||||
.append_pair("path", &abs_path.to_string_lossy());
|
||||
url
|
||||
}
|
||||
MentionUri::Symbol {
|
||||
abs_path,
|
||||
name,
|
||||
line_range,
|
||||
} => {
|
||||
let mut url =
|
||||
Url::from_file_path(abs_path).expect("mention path should be absolute");
|
||||
url.query_pairs_mut().append_pair("symbol", name);
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
url.set_path(&format!("/agent/symbol/{name}"));
|
||||
url.query_pairs_mut()
|
||||
.append_pair("path", &abs_path.to_string_lossy());
|
||||
url.set_fragment(Some(&format!(
|
||||
"L{}:{}",
|
||||
line_range.start() + 1,
|
||||
@@ -202,15 +244,16 @@ impl MentionUri {
|
||||
url
|
||||
}
|
||||
MentionUri::Selection {
|
||||
abs_path: path,
|
||||
abs_path,
|
||||
line_range,
|
||||
} => {
|
||||
let mut url = if let Some(path) = path {
|
||||
Url::from_file_path(path).expect("mention path should be absolute")
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
if let Some(abs_path) = abs_path {
|
||||
url.set_path("/agent/selection");
|
||||
url.query_pairs_mut()
|
||||
.append_pair("path", &abs_path.to_string_lossy());
|
||||
} else {
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
url.set_path("/agent/untitled-buffer");
|
||||
url
|
||||
};
|
||||
url.set_fragment(Some(&format!(
|
||||
"L{}:{}",
|
||||
@@ -295,37 +338,32 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_parse_file_uri() {
|
||||
let file_uri = uri!("file:///path/to/file.rs");
|
||||
let parsed = MentionUri::parse(file_uri).unwrap();
|
||||
let old_uri = uri!("file:///path/to/file.rs");
|
||||
let parsed = MentionUri::parse(old_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::File { abs_path } => {
|
||||
assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/file.rs"));
|
||||
}
|
||||
_ => panic!("Expected File variant"),
|
||||
}
|
||||
assert_eq!(parsed.to_uri().to_string(), file_uri);
|
||||
let new_uri = parsed.to_uri().to_string();
|
||||
assert!(new_uri.starts_with("zed:///agent/file"));
|
||||
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_directory_uri() {
|
||||
let file_uri = uri!("file:///path/to/dir/");
|
||||
let parsed = MentionUri::parse(file_uri).unwrap();
|
||||
let old_uri = uri!("file:///path/to/dir/");
|
||||
let parsed = MentionUri::parse(old_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Directory { abs_path } => {
|
||||
assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/dir/"));
|
||||
}
|
||||
_ => panic!("Expected Directory variant"),
|
||||
}
|
||||
assert_eq!(parsed.to_uri().to_string(), file_uri);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_to_directory_uri_with_slash() {
|
||||
let uri = MentionUri::Directory {
|
||||
abs_path: PathBuf::from(path!("/path/to/dir/")),
|
||||
};
|
||||
let expected = uri!("file:///path/to/dir/");
|
||||
assert_eq!(uri.to_uri().to_string(), expected);
|
||||
let new_uri = parsed.to_uri().to_string();
|
||||
assert!(new_uri.starts_with("zed:///agent/directory"));
|
||||
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -333,14 +371,15 @@ mod tests {
|
||||
let uri = MentionUri::Directory {
|
||||
abs_path: PathBuf::from(path!("/path/to/dir")),
|
||||
};
|
||||
let expected = uri!("file:///path/to/dir/");
|
||||
assert_eq!(uri.to_uri().to_string(), expected);
|
||||
let uri_string = uri.to_uri().to_string();
|
||||
assert!(uri_string.starts_with("zed:///agent/directory"));
|
||||
assert_eq!(MentionUri::parse(&uri_string).unwrap(), uri);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_symbol_uri() {
|
||||
let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20");
|
||||
let parsed = MentionUri::parse(symbol_uri).unwrap();
|
||||
let old_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20");
|
||||
let parsed = MentionUri::parse(old_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Symbol {
|
||||
abs_path: path,
|
||||
@@ -354,13 +393,15 @@ mod tests {
|
||||
}
|
||||
_ => panic!("Expected Symbol variant"),
|
||||
}
|
||||
assert_eq!(parsed.to_uri().to_string(), symbol_uri);
|
||||
let new_uri = parsed.to_uri().to_string();
|
||||
assert!(new_uri.starts_with("zed:///agent/symbol/MySymbol"));
|
||||
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_selection_uri() {
|
||||
let selection_uri = uri!("file:///path/to/file.rs#L5:15");
|
||||
let parsed = MentionUri::parse(selection_uri).unwrap();
|
||||
let old_uri = uri!("file:///path/to/file.rs#L5:15");
|
||||
let parsed = MentionUri::parse(old_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Selection {
|
||||
abs_path: path,
|
||||
@@ -375,7 +416,9 @@ mod tests {
|
||||
}
|
||||
_ => panic!("Expected Selection variant"),
|
||||
}
|
||||
assert_eq!(parsed.to_uri().to_string(), selection_uri);
|
||||
let new_uri = parsed.to_uri().to_string();
|
||||
assert!(new_uri.starts_with("zed:///agent/selection"));
|
||||
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -8,10 +8,7 @@ use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
|
||||
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
|
||||
use std::{cmp, ops::Range, sync::Arc};
|
||||
use text::{Edit, Patch, Rope};
|
||||
use util::{
|
||||
RangeExt, ResultExt as _,
|
||||
paths::{PathStyle, RemotePathBuf},
|
||||
};
|
||||
use util::{RangeExt, ResultExt as _};
|
||||
|
||||
/// Tracks actions performed by tools in a thread
|
||||
pub struct ActionLog {
|
||||
@@ -62,7 +59,13 @@ impl ActionLog {
|
||||
let file_path = buffer
|
||||
.read(cx)
|
||||
.file()
|
||||
.map(|file| RemotePathBuf::new(file.full_path(cx), PathStyle::Posix).to_proto())
|
||||
.map(|file| {
|
||||
let mut path = file.full_path(cx).to_string_lossy().into_owned();
|
||||
if file.path_style(cx).is_windows() {
|
||||
path = path.replace('\\', "/");
|
||||
}
|
||||
path
|
||||
})
|
||||
.unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
|
||||
|
||||
let mut result = String::new();
|
||||
@@ -2301,7 +2304,7 @@ mod tests {
|
||||
.await;
|
||||
fs.set_head_for_repo(
|
||||
path!("/project/.git").as_ref(),
|
||||
&[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
|
||||
&[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
|
||||
"0000000",
|
||||
);
|
||||
cx.run_until_parked();
|
||||
@@ -2384,7 +2387,7 @@ mod tests {
|
||||
// - Ignores the last line edit (j stays as j)
|
||||
fs.set_head_for_repo(
|
||||
path!("/project/.git").as_ref(),
|
||||
&[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
|
||||
&[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
|
||||
"0000001",
|
||||
);
|
||||
cx.run_until_parked();
|
||||
@@ -2415,10 +2418,7 @@ mod tests {
|
||||
// Make another commit that accepts the NEW line but with different content
|
||||
fs.set_head_for_repo(
|
||||
path!("/project/.git").as_ref(),
|
||||
&[(
|
||||
"file.txt".into(),
|
||||
"A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
|
||||
)],
|
||||
&[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
|
||||
"0000002",
|
||||
);
|
||||
cx.run_until_parked();
|
||||
@@ -2444,7 +2444,7 @@ mod tests {
|
||||
// Final commit that accepts all remaining edits
|
||||
fs.set_head_for_repo(
|
||||
path!("/project/.git").as_ref(),
|
||||
&[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
|
||||
&[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
|
||||
"0000003",
|
||||
);
|
||||
cx.run_until_parked();
|
||||
|
||||
@@ -9,12 +9,14 @@ pub mod tool_use;
|
||||
|
||||
pub use context::{AgentContext, ContextId, ContextLoadResult};
|
||||
pub use context_store::ContextStore;
|
||||
use fs::Fs;
|
||||
use std::sync::Arc;
|
||||
pub use thread::{
|
||||
LastRestoreCheckpoint, Message, MessageCrease, MessageId, MessageSegment, Thread, ThreadError,
|
||||
ThreadEvent, ThreadFeedback, ThreadId, ThreadSummary, TokenUsageRatio,
|
||||
};
|
||||
pub use thread_store::{SerializedThread, TextThreadStore, ThreadStore};
|
||||
|
||||
pub fn init(cx: &mut gpui::App) {
|
||||
thread_store::init(cx);
|
||||
pub fn init(fs: Arc<dyn Fs>, cx: &mut gpui::App) {
|
||||
thread_store::init(fs, cx);
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ use std::path::PathBuf;
|
||||
use std::{ops::Range, path::Path, sync::Arc};
|
||||
use text::{Anchor, OffsetRangeExt as _};
|
||||
use util::markdown::MarkdownCodeBlock;
|
||||
use util::rel_path::RelPath;
|
||||
use util::{ResultExt as _, post_inc};
|
||||
|
||||
pub const RULES_ICON: IconName = IconName::Reader;
|
||||
@@ -158,7 +159,7 @@ pub struct FileContextHandle {
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct FileContext {
|
||||
pub handle: FileContextHandle,
|
||||
pub full_path: Arc<Path>,
|
||||
pub full_path: String,
|
||||
pub text: SharedString,
|
||||
pub is_outline: bool,
|
||||
}
|
||||
@@ -186,7 +187,7 @@ impl FileContextHandle {
|
||||
log::error!("file context missing path");
|
||||
return Task::ready(None);
|
||||
};
|
||||
let full_path: Arc<Path> = file.full_path(cx).into();
|
||||
let full_path = file.full_path(cx).to_string_lossy().into_owned();
|
||||
let rope = buffer_ref.as_rope().clone();
|
||||
let buffer = self.buffer.clone();
|
||||
|
||||
@@ -235,14 +236,14 @@ pub struct DirectoryContextHandle {
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DirectoryContext {
|
||||
pub handle: DirectoryContextHandle,
|
||||
pub full_path: Arc<Path>,
|
||||
pub full_path: String,
|
||||
pub descendants: Vec<DirectoryContextDescendant>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DirectoryContextDescendant {
|
||||
/// Path within the directory.
|
||||
pub rel_path: Arc<Path>,
|
||||
pub rel_path: Arc<RelPath>,
|
||||
pub fenced_codeblock: SharedString,
|
||||
}
|
||||
|
||||
@@ -273,13 +274,16 @@ impl DirectoryContextHandle {
}

let directory_path = entry.path.clone();
let directory_full_path = worktree_ref.full_path(&directory_path).into();
let directory_full_path = worktree_ref
.full_path(&directory_path)
.to_string_lossy()
.to_string();

let file_paths = collect_files_in_path(worktree_ref, &directory_path);
let descendants_future = future::join_all(file_paths.into_iter().map(|path| {
let worktree_ref = worktree.read(cx);
let worktree_id = worktree_ref.id();
let full_path = worktree_ref.full_path(&path);
let full_path = worktree_ref.full_path(&path).to_string_lossy().into_owned();

let rel_path = path
.strip_prefix(&directory_path)
@@ -360,7 +364,7 @@ pub struct SymbolContextHandle {
#[derive(Debug, Clone)]
pub struct SymbolContext {
pub handle: SymbolContextHandle,
pub full_path: Arc<Path>,
pub full_path: String,
pub line_range: Range<Point>,
pub text: SharedString,
}
@@ -399,7 +403,7 @@ impl SymbolContextHandle {
log::error!("symbol context's file has no path");
return Task::ready(None);
};
let full_path = file.full_path(cx).into();
let full_path = file.full_path(cx).to_string_lossy().into_owned();
let line_range = self.enclosing_range.to_point(&buffer_ref.snapshot());
let text = self.text(cx);
let buffer = self.buffer.clone();
@@ -433,7 +437,7 @@ pub struct SelectionContextHandle {
#[derive(Debug, Clone)]
pub struct SelectionContext {
pub handle: SelectionContextHandle,
pub full_path: Arc<Path>,
pub full_path: String,
pub line_range: Range<Point>,
pub text: SharedString,
}
@@ -472,7 +476,7 @@ impl SelectionContextHandle {
let text = self.text(cx);
let buffer = self.buffer.clone();
let context = AgentContext::Selection(SelectionContext {
full_path: full_path.into(),
full_path: full_path.to_string_lossy().into_owned(),
line_range: self.line_range(cx),
text,
handle: self,
@@ -702,7 +706,7 @@ impl Display for RulesContext {
#[derive(Debug, Clone)]
pub struct ImageContext {
pub project_path: Option<ProjectPath>,
pub full_path: Option<Arc<Path>>,
pub full_path: Option<String>,
pub original_image: Arc<gpui::Image>,
// TODO: handle this elsewhere and remove `ignore-interior-mutability` opt-out in clippy.toml
// needed due to a false positive of `clippy::mutable_key_type`.
@@ -968,7 +972,7 @@ pub fn load_context(
})
}

fn collect_files_in_path(worktree: &Worktree, path: &Path) -> Vec<Arc<Path>> {
fn collect_files_in_path(worktree: &Worktree, path: &RelPath) -> Vec<Arc<RelPath>> {
let mut files = Vec::new();

for entry in worktree.child_entries(path) {
@@ -982,14 +986,17 @@ fn collect_files_in_path(worktree: &Worktree, path: &Path) -> Vec<Arc<Path>> {
files
}

fn codeblock_tag(full_path: &Path, line_range: Option<Range<Point>>) -> String {
fn codeblock_tag(full_path: &str, line_range: Option<Range<Point>>) -> String {
let mut result = String::new();

if let Some(extension) = full_path.extension().and_then(|ext| ext.to_str()) {
if let Some(extension) = Path::new(full_path)
.extension()
.and_then(|ext| ext.to_str())
{
let _ = write!(result, "{} ", extension);
}

let _ = write!(result, "{}", full_path.display());
let _ = write!(result, "{}", full_path);

if let Some(range) = line_range {
if range.start.row == range.end.row {

@@ -14,7 +14,10 @@ use futures::{self, FutureExt};
use gpui::{App, Context, Entity, EventEmitter, Image, SharedString, Task, WeakEntity};
use language::{Buffer, File as _};
use language_model::LanguageModelImage;
use project::{Project, ProjectItem, ProjectPath, Symbol, image_store::is_image_file};
use project::{
Project, ProjectItem, ProjectPath, Symbol, image_store::is_image_file,
lsp_store::SymbolLocation,
};
use prompt_store::UserPromptId;
use ref_cast::RefCast as _;
use std::{
@@ -309,7 +312,7 @@ impl ContextStore {
let item = image_item.read(cx);
this.insert_image(
Some(item.project_path(cx)),
Some(item.file.full_path(cx).into()),
Some(item.file.full_path(cx).to_string_lossy().into_owned()),
item.image.clone(),
remove_if_exists,
cx,
@@ -325,7 +328,7 @@ impl ContextStore {
fn insert_image(
&mut self,
project_path: Option<ProjectPath>,
full_path: Option<Arc<Path>>,
full_path: Option<String>,
image: Arc<Image>,
remove_if_exists: bool,
cx: &mut Context<ContextStore>,
@@ -500,7 +503,7 @@ impl ContextStore {
let Some(context_path) = buffer.project_path(cx) else {
return false;
};
if context_path != symbol.path {
if symbol.path != SymbolLocation::InProject(context_path) {
return false;
}
let context_range = context.range.to_point_utf16(&buffer.snapshot());

@@ -155,7 +155,7 @@ impl HistoryStore {
.iter()
.filter_map(|entry| match entry {
HistoryEntryId::Context(path) => path.file_name().map(|file| {
SerializedRecentOpen::ContextName(file.to_string_lossy().to_string())
SerializedRecentOpen::ContextName(file.to_string_lossy().into_owned())
}),
HistoryEntryId::Thread(id) => Some(SerializedRecentOpen::Thread(id.to_string())),
})

@@ -234,7 +234,6 @@ impl MessageSegment {
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ProjectSnapshot {
pub worktree_snapshots: Vec<WorktreeSnapshot>,
pub unsaved_buffer_paths: Vec<String>,
pub timestamp: DateTime<Utc>,
}

@@ -2857,27 +2856,11 @@ impl Thread {
.map(|worktree| Self::worktree_snapshot(worktree, git_store.clone(), cx))
.collect();

cx.spawn(async move |_, cx| {
cx.spawn(async move |_, _| {
let worktree_snapshots = futures::future::join_all(worktree_snapshots).await;

let mut unsaved_buffers = Vec::new();
cx.update(|app_cx| {
let buffer_store = project.read(app_cx).buffer_store();
for buffer_handle in buffer_store.read(app_cx).buffers() {
let buffer = buffer_handle.read(app_cx);
if buffer.is_dirty()
&& let Some(file) = buffer.file()
{
let path = file.path().to_string_lossy().to_string();
unsaved_buffers.push(path);
}
}
})
.ok();

Arc::new(ProjectSnapshot {
worktree_snapshots,
unsaved_buffer_paths: unsaved_buffers,
timestamp: Utc::now(),
})
})
@@ -2892,7 +2875,7 @@ impl Thread {
// Get worktree path and snapshot
let worktree_info = cx.update(|app_cx| {
let worktree = worktree.read(app_cx);
let path = worktree.abs_path().to_string_lossy().to_string();
let path = worktree.abs_path().to_string_lossy().into_owned();
let snapshot = worktree.snapshot();
(path, snapshot)
});
@@ -3275,6 +3258,7 @@ mod tests {
use agent_settings::{AgentProfileId, AgentSettings};
use assistant_tool::ToolRegistry;
use assistant_tools;
use fs::Fs;
use futures::StreamExt;
use futures::future::BoxFuture;
use futures::stream::BoxStream;
@@ -3298,9 +3282,10 @@ mod tests {

#[gpui::test]
async fn test_message_with_context(cx: &mut TestAppContext) {
init_test_settings(cx);
let fs = init_test_settings(cx);

let project = create_test_project(
&fs,
cx,
json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
)
@@ -3375,9 +3360,10 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_only_include_new_contexts(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(
|
||||
&fs,
|
||||
cx,
|
||||
json!({
|
||||
"file1.rs": "fn function1() {}\n",
|
||||
@@ -3531,9 +3517,10 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_message_without_files(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(
|
||||
&fs,
|
||||
cx,
|
||||
json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
|
||||
)
|
||||
@@ -3610,9 +3597,10 @@ fn main() {{
|
||||
#[gpui::test]
|
||||
#[ignore] // turn this test on when project_notifications tool is re-enabled
|
||||
async fn test_stale_buffer_notification(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(
|
||||
&fs,
|
||||
cx,
|
||||
json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
|
||||
)
|
||||
@@ -3738,9 +3726,10 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_storing_profile_setting_per_thread(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(
|
||||
&fs,
|
||||
cx,
|
||||
json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
|
||||
)
|
||||
@@ -3760,9 +3749,10 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_serializing_thread_profile(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(
|
||||
&fs,
|
||||
cx,
|
||||
json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
|
||||
)
|
||||
@@ -3803,9 +3793,10 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_temperature_setting(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(
|
||||
&fs,
|
||||
cx,
|
||||
json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
|
||||
)
|
||||
@@ -3897,9 +3888,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_thread_summary(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
|
||||
let (_, _thread_store, thread, _context_store, model) =
|
||||
setup_test_environment(cx, project.clone()).await;
|
||||
@@ -3982,9 +3973,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_thread_summary_error_set_manually(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
|
||||
let (_, _thread_store, thread, _context_store, model) =
|
||||
setup_test_environment(cx, project.clone()).await;
|
||||
@@ -4004,9 +3995,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_thread_summary_error_retry(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
|
||||
let (_, _thread_store, thread, _context_store, model) =
|
||||
setup_test_environment(cx, project.clone()).await;
|
||||
@@ -4158,9 +4149,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_retry_on_overloaded_error(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Enable Burn Mode to allow retries
|
||||
@@ -4236,9 +4227,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_retry_on_internal_server_error(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Enable Burn Mode to allow retries
|
||||
@@ -4318,9 +4309,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_exponential_backoff_on_retries(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Enable Burn Mode to allow retries
|
||||
@@ -4438,9 +4429,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_max_retries_exceeded(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Enable Burn Mode to allow retries
|
||||
@@ -4529,9 +4520,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_retry_message_removed_on_retry(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Enable Burn Mode to allow retries
|
||||
@@ -4702,9 +4693,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_successful_completion_clears_retry_state(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Enable Burn Mode to allow retries
|
||||
@@ -4868,9 +4859,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_rate_limit_retry_single_attempt(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Enable Burn Mode to allow retries
|
||||
@@ -5053,9 +5044,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_ui_only_messages_not_sent_to_model(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
let (_, _, thread, _, model) = setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Insert a regular user message
|
||||
@@ -5153,9 +5144,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_no_retry_without_burn_mode(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Ensure we're in Normal mode (not Burn mode)
|
||||
@@ -5226,9 +5217,9 @@ fn main() {{
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_retry_canceled_on_stop(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
let fs = init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let project = create_test_project(&fs, cx, json!({})).await;
|
||||
let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Enable Burn Mode to allow retries
|
||||
@@ -5334,7 +5325,8 @@ fn main() {{
cx.run_until_parked();
}

fn init_test_settings(cx: &mut TestAppContext) {
fn init_test_settings(cx: &mut TestAppContext) -> Arc<dyn Fs> {
let fs = FakeFs::new(cx.executor());
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
@@ -5342,7 +5334,7 @@ fn main() {{
Project::init_settings(cx);
AgentSettings::register(cx);
prompt_store::init(cx);
thread_store::init(cx);
thread_store::init(fs.clone(), cx);
workspace::init_settings(cx);
language_model::init_settings(cx);
ThemeSettings::register(cx);
@@ -5356,16 +5348,17 @@ fn main() {{
));
assistant_tools::init(http_client, cx);
});
fs
}

// Helper to create a test project with test files
async fn create_test_project(
fs: &Arc<dyn Fs>,
cx: &mut TestAppContext,
files: serde_json::Value,
) -> Entity<Project> {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(path!("/test"), files).await;
Project::test(fs, [path!("/test").as_ref()], cx).await
fs.as_fake().insert_tree(path!("/test"), files).await;
Project::test(fs.clone(), [path!("/test").as_ref()], cx).await
}

async fn setup_test_environment(

@@ -10,6 +10,7 @@ use assistant_tool::{Tool, ToolId, ToolWorkingSet};
|
||||
use chrono::{DateTime, Utc};
|
||||
use collections::HashMap;
|
||||
use context_server::ContextServerId;
|
||||
use fs::{Fs, RemoveOptions};
|
||||
use futures::{
|
||||
FutureExt as _, StreamExt as _,
|
||||
channel::{mpsc, oneshot},
|
||||
@@ -39,7 +40,7 @@ use std::{
|
||||
rc::Rc,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use util::{ResultExt as _, rel_path::RelPath};
|
||||
|
||||
use zed_env_vars::ZED_STATELESS;
|
||||
|
||||
@@ -85,8 +86,8 @@ const RULES_FILE_NAMES: [&str; 9] = [
|
||||
"GEMINI.md",
|
||||
];
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
ThreadsDatabase::init(cx);
|
||||
pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
|
||||
ThreadsDatabase::init(fs, cx);
|
||||
}
|
||||
|
||||
/// A system prompt shared by all threads created by this ThreadStore
|
||||
@@ -234,7 +235,7 @@ impl ThreadStore {
|
||||
if items.iter().any(|(path, _, _)| {
|
||||
RULES_FILE_NAMES
|
||||
.iter()
|
||||
.any(|name| path.as_ref() == Path::new(name))
|
||||
.any(|name| path.as_ref() == RelPath::unix(name).unwrap())
|
||||
}) {
|
||||
self.enqueue_system_prompt_reload();
|
||||
}
|
||||
@@ -327,7 +328,7 @@ impl ThreadStore {
|
||||
cx: &mut App,
|
||||
) -> Task<(WorktreeContext, Option<RulesLoadingError>)> {
|
||||
let tree = worktree.read(cx);
|
||||
let root_name = tree.root_name().into();
|
||||
let root_name = tree.root_name_str().into();
|
||||
let abs_path = tree.abs_path();
|
||||
|
||||
let mut context = WorktreeContext {
|
||||
@@ -367,7 +368,7 @@ impl ThreadStore {
|
||||
.into_iter()
|
||||
.filter_map(|name| {
|
||||
worktree
|
||||
.entry_for_path(name)
|
||||
.entry_for_path(RelPath::unix(name).unwrap())
|
||||
.filter(|entry| entry.is_file())
|
||||
.map(|entry| entry.path.clone())
|
||||
})
|
||||
@@ -869,13 +870,13 @@ impl ThreadsDatabase {
|
||||
GlobalThreadsDatabase::global(cx).0.clone()
|
||||
}
|
||||
|
||||
fn init(cx: &mut App) {
|
||||
fn init(fs: Arc<dyn Fs>, cx: &mut App) {
|
||||
let executor = cx.background_executor().clone();
|
||||
let database_future = executor
|
||||
.spawn({
|
||||
let executor = executor.clone();
|
||||
let threads_dir = paths::data_dir().join("threads");
|
||||
async move { ThreadsDatabase::new(threads_dir, executor) }
|
||||
async move { ThreadsDatabase::new(fs, threads_dir, executor).await }
|
||||
})
|
||||
.then(|result| future::ready(result.map(Arc::new).map_err(Arc::new)))
|
||||
.boxed()
|
||||
@@ -884,13 +885,17 @@ impl ThreadsDatabase {
|
||||
cx.set_global(GlobalThreadsDatabase(database_future));
|
||||
}
|
||||
|
||||
pub fn new(threads_dir: PathBuf, executor: BackgroundExecutor) -> Result<Self> {
|
||||
std::fs::create_dir_all(&threads_dir)?;
|
||||
pub async fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
threads_dir: PathBuf,
|
||||
executor: BackgroundExecutor,
|
||||
) -> Result<Self> {
|
||||
fs.create_dir(&threads_dir).await?;
|
||||
|
||||
let sqlite_path = threads_dir.join("threads.db");
|
||||
let mdb_path = threads_dir.join("threads-db.1.mdb");
|
||||
|
||||
let needs_migration_from_heed = mdb_path.exists();
|
||||
let needs_migration_from_heed = fs.is_file(&mdb_path).await;
|
||||
|
||||
let connection = if *ZED_STATELESS {
|
||||
Connection::open_memory(Some("THREAD_FALLBACK_DB"))
|
||||
@@ -932,7 +937,14 @@ impl ThreadsDatabase {
|
||||
.spawn(async move {
|
||||
log::info!("Starting threads.db migration");
|
||||
Self::migrate_from_heed(&mdb_path, db_connection, executor_clone)?;
|
||||
std::fs::remove_dir_all(mdb_path)?;
|
||||
fs.remove_dir(
|
||||
&mdb_path,
|
||||
RemoveOptions {
|
||||
recursive: true,
|
||||
ignore_if_not_exists: true,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
log::info!("threads.db migrated to sqlite");
|
||||
Ok::<(), anyhow::Error>(())
|
||||
})
|
||||
|
||||
@@ -27,6 +27,7 @@ use std::path::{Path, PathBuf};
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use util::ResultExt;
|
||||
use util::rel_path::RelPath;
|
||||
|
||||
const RULES_FILE_NAMES: [&str; 9] = [
|
||||
".rules",
|
||||
@@ -56,7 +57,7 @@ struct Session {
|
||||
|
||||
pub struct LanguageModels {
|
||||
/// Access language model by ID
|
||||
models: HashMap<acp_thread::AgentModelId, Arc<dyn LanguageModel>>,
|
||||
models: HashMap<acp::ModelId, Arc<dyn LanguageModel>>,
|
||||
/// Cached list for returning language model information
|
||||
model_list: acp_thread::AgentModelList,
|
||||
refresh_models_rx: watch::Receiver<()>,
|
||||
@@ -132,10 +133,7 @@ impl LanguageModels {
|
||||
self.refresh_models_rx.clone()
|
||||
}
|
||||
|
||||
pub fn model_from_id(
|
||||
&self,
|
||||
model_id: &acp_thread::AgentModelId,
|
||||
) -> Option<Arc<dyn LanguageModel>> {
|
||||
pub fn model_from_id(&self, model_id: &acp::ModelId) -> Option<Arc<dyn LanguageModel>> {
|
||||
self.models.get(model_id).cloned()
|
||||
}
|
||||
|
||||
@@ -146,12 +144,13 @@ impl LanguageModels {
|
||||
acp_thread::AgentModelInfo {
|
||||
id: Self::model_id(model),
|
||||
name: model.name().0,
|
||||
description: None,
|
||||
icon: Some(provider.icon()),
|
||||
}
|
||||
}
|
||||
|
||||
fn model_id(model: &Arc<dyn LanguageModel>) -> acp_thread::AgentModelId {
|
||||
acp_thread::AgentModelId(format!("{}/{}", model.provider_id().0, model.id().0).into())
|
||||
fn model_id(model: &Arc<dyn LanguageModel>) -> acp::ModelId {
|
||||
acp::ModelId(format!("{}/{}", model.provider_id().0, model.id().0).into())
|
||||
}
|
||||
|
||||
fn authenticate_all_language_model_providers(cx: &mut App) -> Task<()> {
|
||||
@@ -436,7 +435,7 @@ impl NativeAgent {
|
||||
cx: &mut App,
|
||||
) -> Task<(WorktreeContext, Option<RulesLoadingError>)> {
|
||||
let tree = worktree.read(cx);
|
||||
let root_name = tree.root_name().into();
|
||||
let root_name = tree.root_name_str().into();
|
||||
let abs_path = tree.abs_path();
|
||||
|
||||
let mut context = WorktreeContext {
|
||||
@@ -476,7 +475,7 @@ impl NativeAgent {
|
||||
.into_iter()
|
||||
.filter_map(|name| {
|
||||
worktree
|
||||
.entry_for_path(name)
|
||||
.entry_for_path(RelPath::unix(name).unwrap())
|
||||
.filter(|entry| entry.is_file())
|
||||
.map(|entry| entry.path.clone())
|
||||
})
|
||||
@@ -560,7 +559,7 @@ impl NativeAgent {
|
||||
if items.iter().any(|(path, _, _)| {
|
||||
RULES_FILE_NAMES
|
||||
.iter()
|
||||
.any(|name| path.as_ref() == Path::new(name))
|
||||
.any(|name| path.as_ref() == RelPath::unix(name).unwrap())
|
||||
}) {
|
||||
self.project_context_needs_refresh.send(()).ok();
|
||||
}
|
||||
@@ -836,10 +835,15 @@ impl NativeAgentConnection {
|
||||
}
|
||||
}
|
||||
|
||||
impl AgentModelSelector for NativeAgentConnection {
|
||||
struct NativeAgentModelSelector {
|
||||
session_id: acp::SessionId,
|
||||
connection: NativeAgentConnection,
|
||||
}
|
||||
|
||||
impl acp_thread::AgentModelSelector for NativeAgentModelSelector {
|
||||
fn list_models(&self, cx: &mut App) -> Task<Result<acp_thread::AgentModelList>> {
|
||||
log::debug!("NativeAgentConnection::list_models called");
|
||||
let list = self.0.read(cx).models.model_list.clone();
|
||||
let list = self.connection.0.read(cx).models.model_list.clone();
|
||||
Task::ready(if list.is_empty() {
|
||||
Err(anyhow::anyhow!("No models available"))
|
||||
} else {
|
||||
@@ -847,24 +851,24 @@ impl AgentModelSelector for NativeAgentConnection {
|
||||
})
|
||||
}
|
||||
|
||||
fn select_model(
|
||||
&self,
|
||||
session_id: acp::SessionId,
|
||||
model_id: acp_thread::AgentModelId,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<()>> {
|
||||
log::debug!("Setting model for session {}: {}", session_id, model_id);
|
||||
fn select_model(&self, model_id: acp::ModelId, cx: &mut App) -> Task<Result<()>> {
|
||||
log::debug!(
|
||||
"Setting model for session {}: {}",
|
||||
self.session_id,
|
||||
model_id
|
||||
);
|
||||
let Some(thread) = self
|
||||
.connection
|
||||
.0
|
||||
.read(cx)
|
||||
.sessions
|
||||
.get(&session_id)
|
||||
.get(&self.session_id)
|
||||
.map(|session| session.thread.clone())
|
||||
else {
|
||||
return Task::ready(Err(anyhow!("Session not found")));
|
||||
};
|
||||
|
||||
let Some(model) = self.0.read(cx).models.model_from_id(&model_id) else {
|
||||
let Some(model) = self.connection.0.read(cx).models.model_from_id(&model_id) else {
|
||||
return Task::ready(Err(anyhow!("Invalid model ID {}", model_id)));
|
||||
};
|
||||
|
||||
@@ -872,33 +876,32 @@ impl AgentModelSelector for NativeAgentConnection {
|
||||
thread.set_model(model.clone(), cx);
|
||||
});
|
||||
|
||||
update_settings_file(self.0.read(cx).fs.clone(), cx, move |settings, _cx| {
|
||||
let provider = model.provider_id().0.to_string();
|
||||
let model = model.id().0.to_string();
|
||||
settings
|
||||
.agent
|
||||
.get_or_insert_default()
|
||||
.set_model(LanguageModelSelection {
|
||||
provider: provider.into(),
|
||||
model,
|
||||
});
|
||||
});
|
||||
update_settings_file(
|
||||
self.connection.0.read(cx).fs.clone(),
|
||||
cx,
|
||||
move |settings, _cx| {
|
||||
let provider = model.provider_id().0.to_string();
|
||||
let model = model.id().0.to_string();
|
||||
settings
|
||||
.agent
|
||||
.get_or_insert_default()
|
||||
.set_model(LanguageModelSelection {
|
||||
provider: provider.into(),
|
||||
model,
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
|
||||
fn selected_model(
|
||||
&self,
|
||||
session_id: &acp::SessionId,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<acp_thread::AgentModelInfo>> {
|
||||
let session_id = session_id.clone();
|
||||
|
||||
fn selected_model(&self, cx: &mut App) -> Task<Result<acp_thread::AgentModelInfo>> {
|
||||
let Some(thread) = self
|
||||
.connection
|
||||
.0
|
||||
.read(cx)
|
||||
.sessions
|
||||
.get(&session_id)
|
||||
.get(&self.session_id)
|
||||
.map(|session| session.thread.clone())
|
||||
else {
|
||||
return Task::ready(Err(anyhow!("Session not found")));
|
||||
@@ -915,8 +918,8 @@ impl AgentModelSelector for NativeAgentConnection {
|
||||
)))
|
||||
}
|
||||
|
||||
fn watch(&self, cx: &mut App) -> watch::Receiver<()> {
|
||||
self.0.read(cx).models.watch()
|
||||
fn watch(&self, cx: &mut App) -> Option<watch::Receiver<()>> {
|
||||
Some(self.connection.0.read(cx).models.watch())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -972,8 +975,11 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
|
||||
fn model_selector(&self) -> Option<Rc<dyn AgentModelSelector>> {
|
||||
Some(Rc::new(self.clone()) as Rc<dyn AgentModelSelector>)
|
||||
fn model_selector(&self, session_id: &acp::SessionId) -> Option<Rc<dyn AgentModelSelector>> {
|
||||
Some(Rc::new(NativeAgentModelSelector {
|
||||
session_id: session_id.clone(),
|
||||
connection: self.clone(),
|
||||
}) as Rc<dyn AgentModelSelector>)
|
||||
}
|
||||
|
||||
fn prompt(
|
||||
@@ -1196,16 +1202,14 @@ mod tests {
|
||||
use crate::HistoryEntryId;
|
||||
|
||||
use super::*;
|
||||
use acp_thread::{
|
||||
AgentConnection, AgentModelGroupName, AgentModelId, AgentModelInfo, MentionUri,
|
||||
};
|
||||
use acp_thread::{AgentConnection, AgentModelGroupName, AgentModelInfo, MentionUri};
|
||||
use fs::FakeFs;
|
||||
use gpui::TestAppContext;
|
||||
use indoc::indoc;
|
||||
use indoc::formatdoc;
|
||||
use language_model::fake_provider::FakeLanguageModel;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use util::path;
|
||||
use util::{path, rel_path::rel_path};
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_maintaining_project_context(cx: &mut TestAppContext) {
|
||||
@@ -1255,14 +1259,17 @@ mod tests {
|
||||
fs.insert_file("/a/.rules", Vec::new()).await;
|
||||
cx.run_until_parked();
|
||||
agent.read_with(cx, |agent, cx| {
|
||||
let rules_entry = worktree.read(cx).entry_for_path(".rules").unwrap();
|
||||
let rules_entry = worktree
|
||||
.read(cx)
|
||||
.entry_for_path(rel_path(".rules"))
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
agent.project_context.read(cx).worktrees,
|
||||
vec![WorktreeContext {
|
||||
root_name: "a".into(),
|
||||
abs_path: Path::new("/a").into(),
|
||||
rules_file: Some(RulesFileContext {
|
||||
path_in_worktree: Path::new(".rules").into(),
|
||||
path_in_worktree: rel_path(".rules").into(),
|
||||
text: "".into(),
|
||||
project_entry_id: rules_entry.id.to_usize()
|
||||
})
|
||||
@@ -1292,7 +1299,25 @@ mod tests {
|
||||
.unwrap(),
|
||||
);
|
||||
|
||||
let models = cx.update(|cx| connection.list_models(cx)).await.unwrap();
|
||||
// Create a thread/session
|
||||
let acp_thread = cx
|
||||
.update(|cx| {
|
||||
Rc::new(connection.clone()).new_thread(project.clone(), Path::new("/a"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let session_id = cx.update(|cx| acp_thread.read(cx).session_id().clone());
|
||||
|
||||
let models = cx
|
||||
.update(|cx| {
|
||||
connection
|
||||
.model_selector(&session_id)
|
||||
.unwrap()
|
||||
.list_models(cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let acp_thread::AgentModelList::Grouped(models) = models else {
|
||||
panic!("Unexpected model group");
|
||||
@@ -1302,8 +1327,9 @@ mod tests {
|
||||
IndexMap::from_iter([(
|
||||
AgentModelGroupName("Fake".into()),
|
||||
vec![AgentModelInfo {
|
||||
id: AgentModelId("fake/fake".into()),
|
||||
id: acp::ModelId("fake/fake".into()),
|
||||
name: "Fake".into(),
|
||||
description: None,
|
||||
icon: Some(ui::IconName::ZedAssistant),
|
||||
}]
|
||||
)])
|
||||
@@ -1360,8 +1386,9 @@ mod tests {
|
||||
let session_id = cx.update(|cx| acp_thread.read(cx).session_id().clone());
|
||||
|
||||
// Select a model
|
||||
let model_id = AgentModelId("fake/fake".into());
|
||||
cx.update(|cx| connection.select_model(session_id.clone(), model_id.clone(), cx))
|
||||
let selector = connection.model_selector(&session_id).unwrap();
|
||||
let model_id = acp::ModelId("fake/fake".into());
|
||||
cx.update(|cx| selector.select_model(model_id.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -1475,13 +1502,17 @@ mod tests {
|
||||
summary_model.end_last_completion_stream();
|
||||
|
||||
send.await.unwrap();
|
||||
let uri = MentionUri::File {
|
||||
abs_path: path!("/a/b.md").into(),
|
||||
}
|
||||
.to_uri();
|
||||
acp_thread.read_with(cx, |thread, cx| {
|
||||
assert_eq!(
|
||||
thread.to_markdown(cx),
|
||||
indoc! {"
|
||||
formatdoc! {"
|
||||
## User
|
||||
|
||||
What does [@b.md](file:///a/b.md) mean?
|
||||
What does [@b.md]({uri}) mean?
|
||||
|
||||
## Assistant
|
||||
|
||||
@@ -1517,10 +1548,10 @@ mod tests {
|
||||
acp_thread.read_with(cx, |thread, cx| {
|
||||
assert_eq!(
|
||||
thread.to_markdown(cx),
|
||||
indoc! {"
|
||||
formatdoc! {"
|
||||
## User
|
||||
|
||||
What does [@b.md](file:///a/b.md) mean?
|
||||
What does [@b.md]({uri}) mean?
|
||||
|
||||
## Assistant
|
||||
|
||||
|
||||
@@ -422,17 +422,15 @@ mod tests {
|
||||
use agent::MessageSegment;
|
||||
use agent::context::LoadedContext;
|
||||
use client::Client;
|
||||
use fs::FakeFs;
|
||||
use fs::{FakeFs, Fs};
|
||||
use gpui::AppContext;
|
||||
use gpui::TestAppContext;
|
||||
use http_client::FakeHttpClient;
|
||||
use language_model::Role;
|
||||
use project::Project;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use util::test::TempTree;
|
||||
|
||||
fn init_test(cx: &mut TestAppContext) {
|
||||
fn init_test(fs: Arc<dyn Fs>, cx: &mut TestAppContext) {
|
||||
env_logger::try_init().ok();
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
@@ -443,7 +441,7 @@ mod tests {
|
||||
let http_client = FakeHttpClient::with_404_response();
|
||||
let clock = Arc::new(clock::FakeSystemClock::new());
|
||||
let client = Client::new(clock, http_client, cx);
|
||||
agent::init(cx);
|
||||
agent::init(fs, cx);
|
||||
agent_settings::init(cx);
|
||||
language_model::init(client, cx);
|
||||
});
|
||||
@@ -451,10 +449,8 @@ mod tests {
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_retrieving_old_thread(cx: &mut TestAppContext) {
|
||||
let tree = TempTree::new(json!({}));
|
||||
util::paths::set_home_dir(tree.path().into());
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
init_test(fs.clone(), cx);
|
||||
let project = Project::test(fs, [], cx).await;
|
||||
|
||||
// Save a thread using the old agent.
|
||||
|
||||
@@ -262,7 +262,7 @@ impl HistoryStore {
|
||||
.iter()
|
||||
.filter_map(|entry| match entry {
|
||||
HistoryEntryId::TextThread(path) => path.file_name().map(|file| {
|
||||
SerializedRecentOpen::TextThread(file.to_string_lossy().to_string())
|
||||
SerializedRecentOpen::TextThread(file.to_string_lossy().into_owned())
|
||||
}),
|
||||
HistoryEntryId::AcpThread(id) => {
|
||||
Some(SerializedRecentOpen::AcpThread(id.to_string()))
|
||||
|
||||
@@ -48,16 +48,15 @@ The one exception to this is if the user references something you don't know abo
## Code Block Formatting

Whenever you mention a code block, you MUST use ONLY use the following format:

```path/to/Something.blah#L123-456
(code goes here)
```
The `#L123-456` means the line number range 123 through 456, and the path/to/Something.blah
is a path in the project. (If there is no valid path in the project, then you can use
/dev/null/path.extension for its path.) This is the ONLY valid way to format code blocks, because the Markdown parser
does not understand the more common ```language syntax, or bare ``` blocks. It only
understands this path-based syntax, and if the path is missing, then it will error and you will have to do it over again.

The `#L123-456` means the line number range 123 through 456, and the path/to/Something.blah is a path in the project. (If there is no valid path in the project, then you can use /dev/null/path.extension for its path.) This is the ONLY valid way to format code blocks, because the Markdown parser does not understand the more common ```language syntax, or bare ``` blocks. It only understands this path-based syntax, and if the path is missing, then it will error and you will have to do it over again.
Just to be really clear about this, if you ever find yourself writing three backticks followed by a language name, STOP!
You have made a mistake. You can only ever put paths after triple backticks!

<example>
Based on all the information I've gathered, here's a summary of how this system works:
1. The README file is loaded into the system.
@@ -74,6 +73,7 @@ This is the last header in the README.
```
4. Finally, it passes this information on to the next process.
</example>

<example>
In Markdown, hash marks signify headings. For example:
```/dev/null/example.md#L1-3
@@ -82,6 +82,7 @@ In Markdown, hash marks signify headings. For example:
### Level 3 heading
```
</example>

Here are examples of ways you must never render code blocks:
<bad_example_do_not_do_this>
In Markdown, hash marks signify headings. For example:
@@ -91,7 +92,9 @@ In Markdown, hash marks signify headings. For example:
### Level 3 heading
```
</bad_example_do_not_do_this>

This example is unacceptable because it does not include the path.

<bad_example_do_not_do_this>
In Markdown, hash marks signify headings. For example:
```markdown
@@ -101,14 +104,15 @@ In Markdown, hash marks signify headings. For example:
```
</bad_example_do_not_do_this>
This example is unacceptable because it has the language instead of the path.

<bad_example_do_not_do_this>
In Markdown, hash marks signify headings. For example:
# Level 1 heading
## Level 2 heading
### Level 3 heading
</bad_example_do_not_do_this>
This example is unacceptable because it uses indentation to mark the code block
instead of backticks with a path.
This example is unacceptable because it uses indentation to mark the code block instead of backticks with a path.

<bad_example_do_not_do_this>
In Markdown, hash marks signify headings. For example:
```markdown

@@ -1850,8 +1850,18 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
|
||||
.unwrap();
|
||||
let connection = NativeAgentConnection(agent.clone());
|
||||
|
||||
// Create a thread using new_thread
|
||||
let connection_rc = Rc::new(connection.clone());
|
||||
let acp_thread = cx
|
||||
.update(|cx| connection_rc.new_thread(project, cwd, cx))
|
||||
.await
|
||||
.expect("new_thread should succeed");
|
||||
|
||||
// Get the session_id from the AcpThread
|
||||
let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone());
|
||||
|
||||
// Test model_selector returns Some
|
||||
let selector_opt = connection.model_selector();
|
||||
let selector_opt = connection.model_selector(&session_id);
|
||||
assert!(
|
||||
selector_opt.is_some(),
|
||||
"agent2 should always support ModelSelector"
|
||||
@@ -1868,23 +1878,16 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
|
||||
};
|
||||
assert!(!listed_models.is_empty(), "should have at least one model");
|
||||
assert_eq!(
|
||||
listed_models[&AgentModelGroupName("Fake".into())][0].id.0,
|
||||
listed_models[&AgentModelGroupName("Fake".into())][0]
|
||||
.id
|
||||
.0
|
||||
.as_ref(),
|
||||
"fake/fake"
|
||||
);
|
||||
|
||||
// Create a thread using new_thread
|
||||
let connection_rc = Rc::new(connection.clone());
|
||||
let acp_thread = cx
|
||||
.update(|cx| connection_rc.new_thread(project, cwd, cx))
|
||||
.await
|
||||
.expect("new_thread should succeed");
|
||||
|
||||
// Get the session_id from the AcpThread
|
||||
let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone());
|
||||
|
||||
// Test selected_model returns the default
|
||||
let model = cx
|
||||
.update(|cx| selector.selected_model(&session_id, cx))
|
||||
.update(|cx| selector.selected_model(cx))
|
||||
.await
|
||||
.expect("selected_model should succeed");
|
||||
let model = cx
|
||||
|
||||
@@ -879,27 +879,11 @@ impl Thread {
|
||||
.map(|worktree| Self::worktree_snapshot(worktree, git_store.clone(), cx))
|
||||
.collect();
|
||||
|
||||
cx.spawn(async move |_, cx| {
|
||||
cx.spawn(async move |_, _| {
|
||||
let worktree_snapshots = futures::future::join_all(worktree_snapshots).await;
|
||||
|
||||
let mut unsaved_buffers = Vec::new();
|
||||
cx.update(|app_cx| {
|
||||
let buffer_store = project.read(app_cx).buffer_store();
|
||||
for buffer_handle in buffer_store.read(app_cx).buffers() {
|
||||
let buffer = buffer_handle.read(app_cx);
|
||||
if buffer.is_dirty()
|
||||
&& let Some(file) = buffer.file()
|
||||
{
|
||||
let path = file.path().to_string_lossy().to_string();
|
||||
unsaved_buffers.push(path);
|
||||
}
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
|
||||
Arc::new(ProjectSnapshot {
|
||||
worktree_snapshots,
|
||||
unsaved_buffer_paths: unsaved_buffers,
|
||||
timestamp: Utc::now(),
|
||||
})
|
||||
})
|
||||
@@ -914,7 +898,7 @@ impl Thread {
|
||||
// Get worktree path and snapshot
|
||||
let worktree_info = cx.update(|app_cx| {
|
||||
let worktree = worktree.read(app_cx);
|
||||
let path = worktree.abs_path().to_string_lossy().to_string();
|
||||
let path = worktree.abs_path().to_string_lossy().into_owned();
|
||||
let snapshot = worktree.snapshot();
|
||||
(path, snapshot)
|
||||
});
|
||||
|
||||
@@ -9,14 +9,14 @@ use std::sync::Arc;
|
||||
use util::markdown::MarkdownInlineCode;
|
||||
|
||||
/// Copies a file or directory in the project, and returns confirmation that the copy succeeded.
|
||||
/// Directory contents will be copied recursively (like `cp -r`).
|
||||
/// Directory contents will be copied recursively.
|
||||
///
|
||||
/// This tool should be used when it's desirable to create a copy of a file or directory without modifying the original.
|
||||
/// It's much more efficient than doing this by separately reading and then writing the file or directory's contents, so this tool should be preferred over that approach whenever copying is the goal.
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct CopyPathToolInput {
|
||||
/// The source path of the file or directory to copy.
|
||||
/// If a directory is specified, its contents will be copied recursively (like `cp -r`).
|
||||
/// If a directory is specified, its contents will be copied recursively.
|
||||
///
|
||||
/// <example>
|
||||
/// If the project has the following files:
|
||||
@@ -84,9 +84,7 @@ impl AgentTool for CopyPathTool {
|
||||
.and_then(|project_path| project.entry_for_path(&project_path, cx))
|
||||
{
|
||||
Some(entity) => match project.find_project_path(&input.destination_path, cx) {
|
||||
Some(project_path) => {
|
||||
project.copy_entry(entity.id, None, project_path.path, cx)
|
||||
}
|
||||
Some(project_path) => project.copy_entry(entity.id, project_path, cx),
|
||||
None => Task::ready(Err(anyhow!(
|
||||
"Destination path {} was outside the project.",
|
||||
input.destination_path
|
||||
|
||||
@@ -11,7 +11,7 @@ use crate::{AgentTool, ToolCallEventStream};
|
||||
|
||||
/// Creates a new directory at the specified path within the project. Returns confirmation that the directory was created.
|
||||
///
|
||||
/// This tool creates a directory and all necessary parent directories (similar to `mkdir -p`). It should be used whenever you need to create new directories within the project.
|
||||
/// This tool creates a directory and all necessary parent directories. It should be used whenever you need to create new directories within the project.
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct CreateDirectoryToolInput {
|
||||
/// The path of the new directory.
|
||||
|
||||
@@ -6,7 +6,7 @@ use language::{DiagnosticSeverity, OffsetRangeExt};
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{fmt::Write, path::Path, sync::Arc};
|
||||
use std::{fmt::Write, sync::Arc};
|
||||
use ui::SharedString;
|
||||
use util::markdown::MarkdownInlineCode;
|
||||
|
||||
@@ -147,9 +147,7 @@ impl AgentTool for DiagnosticsTool {
|
||||
has_diagnostics = true;
|
||||
output.push_str(&format!(
|
||||
"{}: {} error(s), {} warning(s)\n",
|
||||
Path::new(worktree.read(cx).root_name())
|
||||
.join(project_path.path)
|
||||
.display(),
|
||||
worktree.read(cx).absolutize(&project_path.path).display(),
|
||||
summary.error_count,
|
||||
summary.warning_count
|
||||
));
|
||||
|
||||
@@ -17,10 +17,12 @@ use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
use smol::stream::StreamExt as _;
|
||||
use std::ffi::OsStr;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
use ui::SharedString;
|
||||
use util::ResultExt;
|
||||
use util::rel_path::RelPath;
|
||||
|
||||
const DEFAULT_UI_TEXT: &str = "Editing file";
|
||||
|
||||
@@ -148,12 +150,11 @@ impl EditFileTool {
|
||||
|
||||
// If any path component matches the local settings folder, then this could affect
|
||||
// the editor in ways beyond the project source, so prompt.
|
||||
let local_settings_folder = paths::local_settings_folder_relative_path();
|
||||
let local_settings_folder = paths::local_settings_folder_name();
|
||||
let path = Path::new(&input.path);
|
||||
if path
|
||||
.components()
|
||||
.any(|component| component.as_os_str() == local_settings_folder.as_os_str())
|
||||
{
|
||||
if path.components().any(|component| {
|
||||
component.as_os_str() == <_ as AsRef<OsStr>>::as_ref(&local_settings_folder)
|
||||
}) {
|
||||
return event_stream.authorize(
|
||||
format!("{} (local settings)", input.display_description),
|
||||
cx,
|
||||
@@ -162,6 +163,7 @@ impl EditFileTool {
|
||||
|
||||
// It's also possible that the global config dir is configured to be inside the project,
|
||||
// so check for that edge case too.
|
||||
// TODO this is broken when remoting
|
||||
if let Ok(canonical_path) = std::fs::canonicalize(&input.path)
|
||||
&& canonical_path.starts_with(paths::config_dir())
|
||||
{
|
||||
@@ -216,9 +218,7 @@ impl AgentTool for EditFileTool {
|
||||
.read(cx)
|
||||
.short_full_path_for_project_path(&project_path, cx)
|
||||
})
|
||||
.unwrap_or(Path::new(&input.path).into())
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
.unwrap_or(input.path.to_string_lossy().into_owned())
|
||||
.into(),
|
||||
Err(raw_input) => {
|
||||
if let Some(input) =
|
||||
@@ -235,9 +235,7 @@ impl AgentTool for EditFileTool {
|
||||
.read(cx)
|
||||
.short_full_path_for_project_path(&project_path, cx)
|
||||
})
|
||||
.unwrap_or(Path::new(&input.path).into())
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
.unwrap_or(input.path)
|
||||
.into();
|
||||
}
|
||||
|
||||
@@ -478,7 +476,7 @@ impl AgentTool for EditFileTool {
|
||||
) -> Result<()> {
|
||||
event_stream.update_diff(cx.new(|cx| {
|
||||
Diff::finalized(
|
||||
output.input_path,
|
||||
output.input_path.to_string_lossy().into_owned(),
|
||||
Some(output.old_text.to_string()),
|
||||
output.new_text,
|
||||
self.language_registry.clone(),
|
||||
@@ -542,10 +540,12 @@ fn resolve_path(
|
||||
let file_name = input
|
||||
.path
|
||||
.file_name()
|
||||
.and_then(|file_name| file_name.to_str())
|
||||
.and_then(|file_name| RelPath::unix(file_name).ok())
|
||||
.context("Can't create file: invalid filename")?;
|
||||
|
||||
let new_file_path = parent_project_path.map(|parent| ProjectPath {
|
||||
path: Arc::from(parent.path.join(file_name)),
|
||||
path: parent.path.join(file_name),
|
||||
..parent
|
||||
});
|
||||
|
||||
@@ -565,7 +565,7 @@ mod tests {
|
||||
use prompt_store::ProjectContext;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use util::path;
|
||||
use util::{path, rel_path::rel_path};
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_edit_nonexistent_file(cx: &mut TestAppContext) {
|
||||
@@ -614,13 +614,13 @@ mod tests {
|
||||
let mode = &EditFileMode::Create;
|
||||
|
||||
let result = test_resolve_path(mode, "root/new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "new.txt");
|
||||
assert_resolved_path_eq(result.await, rel_path("new.txt"));
|
||||
|
||||
let result = test_resolve_path(mode, "new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "new.txt");
|
||||
assert_resolved_path_eq(result.await, rel_path("new.txt"));
|
||||
|
||||
let result = test_resolve_path(mode, "dir/new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "dir/new.txt");
|
||||
assert_resolved_path_eq(result.await, rel_path("dir/new.txt"));
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir/subdir/existing.txt", cx);
|
||||
assert_eq!(
|
||||
@@ -642,10 +642,10 @@ mod tests {
|
||||
let path_with_root = "root/dir/subdir/existing.txt";
|
||||
let path_without_root = "dir/subdir/existing.txt";
|
||||
let result = test_resolve_path(mode, path_with_root, cx);
|
||||
assert_resolved_path_eq(result.await, path_without_root);
|
||||
assert_resolved_path_eq(result.await, rel_path(path_without_root));
|
||||
|
||||
let result = test_resolve_path(mode, path_without_root, cx);
|
||||
assert_resolved_path_eq(result.await, path_without_root);
|
||||
assert_resolved_path_eq(result.await, rel_path(path_without_root));
|
||||
|
||||
let result = test_resolve_path(mode, "root/nonexistent.txt", cx);
|
||||
assert_eq!(
|
||||
@@ -690,14 +690,10 @@ mod tests {
|
||||
cx.update(|cx| resolve_path(&input, project, cx))
|
||||
}
|
||||
|
||||
fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &str) {
|
||||
let actual = path
|
||||
.expect("Should return valid path")
|
||||
.path
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.replace("\\", "/"); // Naive Windows paths normalization
|
||||
assert_eq!(actual, expected);
|
||||
#[track_caller]
|
||||
fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &RelPath) {
|
||||
let actual = path.expect("Should return valid path").path;
|
||||
assert_eq!(actual.as_ref(), expected);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -1408,8 +1404,8 @@ mod tests {
|
||||
// Parent directory references - find_project_path resolves these
|
||||
(
|
||||
"project/../other",
|
||||
false,
|
||||
"Path with .. is resolved by find_project_path",
|
||||
true,
|
||||
"Path with .. that goes outside of root directory",
|
||||
),
|
||||
(
|
||||
"project/./src/file.rs",
|
||||
@@ -1437,16 +1433,18 @@ mod tests {
|
||||
)
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
if should_confirm {
|
||||
stream_rx.expect_authorization().await;
|
||||
} else {
|
||||
auth.await.unwrap();
|
||||
assert!(
|
||||
stream_rx.try_next().is_err(),
|
||||
"Failed for case: {} - path: {} - expected no confirmation but got one",
|
||||
description,
|
||||
path
|
||||
);
|
||||
auth.await.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -156,10 +156,14 @@ impl AgentTool for FindPathTool {
|
||||
}
|
||||
|
||||
fn search_paths(glob: &str, project: Entity<Project>, cx: &mut App) -> Task<Result<Vec<PathBuf>>> {
|
||||
let path_matcher = match PathMatcher::new([
|
||||
// Sometimes models try to search for "". In this case, return all paths in the project.
|
||||
if glob.is_empty() { "*" } else { glob },
|
||||
]) {
|
||||
let path_style = project.read(cx).path_style(cx);
|
||||
let path_matcher = match PathMatcher::new(
|
||||
[
|
||||
// Sometimes models try to search for "". In this case, return all paths in the project.
|
||||
if glob.is_empty() { "*" } else { glob },
|
||||
],
|
||||
path_style,
|
||||
) {
|
||||
Ok(matcher) => matcher,
|
||||
Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {err}"))),
|
||||
};
|
||||
@@ -173,9 +177,8 @@ fn search_paths(glob: &str, project: Entity<Project>, cx: &mut App) -> Task<Resu
|
||||
let mut results = Vec::new();
|
||||
for snapshot in snapshots {
|
||||
for entry in snapshot.entries(false, 0) {
|
||||
let root_name = PathBuf::from(snapshot.root_name());
|
||||
if path_matcher.is_match(root_name.join(&entry.path)) {
|
||||
results.push(snapshot.abs_path().join(entry.path.as_ref()));
|
||||
if path_matcher.is_match(snapshot.root_name().join(&entry.path).as_std_path()) {
|
||||
results.push(snapshot.absolutize(&entry.path));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -110,12 +110,15 @@ impl AgentTool for GrepTool {
|
||||
const CONTEXT_LINES: u32 = 2;
|
||||
const MAX_ANCESTOR_LINES: u32 = 10;
|
||||
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
|
||||
let include_matcher = match PathMatcher::new(
|
||||
input
|
||||
.include_pattern
|
||||
.as_ref()
|
||||
.into_iter()
|
||||
.collect::<Vec<_>>(),
|
||||
path_style,
|
||||
) {
|
||||
Ok(matcher) => matcher,
|
||||
Err(error) => {
|
||||
@@ -132,7 +135,7 @@ impl AgentTool for GrepTool {
|
||||
.iter()
|
||||
.chain(global_settings.private_files.sources().iter());
|
||||
|
||||
match PathMatcher::new(exclude_patterns) {
|
||||
match PathMatcher::new(exclude_patterns, path_style) {
|
||||
Ok(matcher) => matcher,
|
||||
Err(error) => {
|
||||
return Task::ready(Err(anyhow!("invalid exclude pattern: {error}")));
|
||||
@@ -834,11 +837,14 @@ mod tests {
|
||||
"**/.secretdir".to_string(),
|
||||
"**/.mymetadata".to_string(),
|
||||
]);
|
||||
settings.project.worktree.private_files = Some(vec![
|
||||
"**/.mysecrets".to_string(),
|
||||
"**/*.privatekey".to_string(),
|
||||
"**/*.mysensitive".to_string(),
|
||||
]);
|
||||
settings.project.worktree.private_files = Some(
|
||||
vec![
|
||||
"**/.mysecrets".to_string(),
|
||||
"**/*.privatekey".to_string(),
|
||||
"**/*.mysensitive".to_string(),
|
||||
]
|
||||
.into(),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1064,7 +1070,8 @@ mod tests {
|
||||
store.update_user_settings(cx, |settings| {
|
||||
settings.project.worktree.file_scan_exclusions =
|
||||
Some(vec!["**/.git".to_string(), "**/node_modules".to_string()]);
|
||||
settings.project.worktree.private_files = Some(vec!["**/.env".to_string()]);
|
||||
settings.project.worktree.private_files =
|
||||
Some(vec!["**/.env".to_string()].into());
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,12 +2,12 @@ use crate::{AgentTool, ToolCallEventStream};
use agent_client_protocol::ToolKind;
use anyhow::{Result, anyhow};
use gpui::{App, Entity, SharedString, Task};
use project::{Project, WorktreeSettings};
use project::{Project, ProjectPath, WorktreeSettings};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::fmt::Write;
use std::{path::Path, sync::Arc};
use std::sync::Arc;
use util::markdown::MarkdownInlineCode;

/// Lists files and directories in a given path. Prefer the `grep` or `find_path` tools when searching the codebase.
@@ -86,13 +86,13 @@ impl AgentTool for ListDirectoryTool {
.read(cx)
.worktrees(cx)
.filter_map(|worktree| {
worktree.read(cx).root_entry().and_then(|entry| {
if entry.is_dir() {
entry.path.to_str()
} else {
None
}
})
let worktree = worktree.read(cx);
let root_entry = worktree.root_entry()?;
if root_entry.is_dir() {
Some(root_entry.path.display(worktree.path_style()))
} else {
None
}
})
.collect::<Vec<_>>()
.join("\n");
@@ -143,7 +143,7 @@ impl AgentTool for ListDirectoryTool {
}

let worktree_snapshot = worktree.read(cx).snapshot();
let worktree_root_name = worktree.read(cx).root_name().to_string();
let worktree_root_name = worktree.read(cx).root_name();

let Some(entry) = worktree_snapshot.entry_for_path(&project_path.path) else {
return Task::ready(Err(anyhow!("Path not found: {}", input.path)));
@@ -165,25 +165,17 @@ impl AgentTool for ListDirectoryTool {
continue;
}

if self
.project
.read(cx)
.find_project_path(&entry.path, cx)
.map(|project_path| {
let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx);

worktree_settings.is_path_excluded(&project_path.path)
|| worktree_settings.is_path_private(&project_path.path)
})
.unwrap_or(false)
let project_path: ProjectPath = (worktree_snapshot.id(), entry.path.clone()).into();
if worktree_settings.is_path_excluded(&project_path.path)
|| worktree_settings.is_path_private(&project_path.path)
{
continue;
}

let full_path = Path::new(&worktree_root_name)
let full_path = worktree_root_name
.join(&entry.path)
.display()
.to_string();
.display(worktree_snapshot.path_style())
.into_owned();
if entry.is_dir() {
folders.push(full_path);
} else {
@@ -427,11 +419,14 @@ mod tests {
"**/.mymetadata".to_string(),
"**/.hidden_subdir".to_string(),
]);
settings.project.worktree.private_files = Some(vec![
"**/.mysecrets".to_string(),
"**/*.privatekey".to_string(),
"**/*.mysensitive".to_string(),
]);
settings.project.worktree.private_files = Some(
vec![
"**/.mysecrets".to_string(),
"**/*.privatekey".to_string(),
"**/*.mysensitive".to_string(),
]
.into(),
);
});
});
});
@@ -568,7 +563,8 @@ mod tests {
store.update_user_settings(cx, |settings| {
settings.project.worktree.file_scan_exclusions =
Some(vec!["**/.git".to_string(), "**/node_modules".to_string()]);
settings.project.worktree.private_files = Some(vec!["**/.env".to_string()]);
settings.project.worktree.private_files =
Some(vec!["**/.env".to_string()].into());
});
});
});

@@ -98,7 +98,7 @@ impl AgentTool for MovePathTool {
.and_then(|project_path| project.entry_for_path(&project_path, cx))
{
Some(entity) => match project.find_project_path(&input.destination_path, cx) {
Some(project_path) => project.rename_entry(entity.id, project_path.path, cx),
Some(project_path) => project.rename_entry(entity.id, project_path, cx),
None => Task::ready(Err(anyhow!(
"Destination path {} was outside the project.",
input.destination_path

@@ -104,7 +104,7 @@ mod tests {
async fn test_to_absolute_path(cx: &mut TestAppContext) {
init_test(cx);
let temp_dir = TempDir::new().expect("Failed to create temp directory");
let temp_path = temp_dir.path().to_string_lossy().to_string();
let temp_path = temp_dir.path().to_string_lossy().into_owned();

let fs = FakeFs::new(cx.executor());
fs.insert_tree(

@@ -82,12 +82,12 @@ impl AgentTool for ReadFileTool {
{
match (input.start_line, input.end_line) {
(Some(start), Some(end)) => {
format!("Read file `{}` (lines {}-{})", path.display(), start, end,)
format!("Read file `{path}` (lines {}-{})", start, end,)
}
(Some(start), None) => {
format!("Read file `{}` (from line {})", path.display(), start)
format!("Read file `{path}` (from line {})", start)
}
_ => format!("Read file `{}`", path.display()),
_ => format!("Read file `{path}`"),
}
.into()
} else {
@@ -225,9 +225,12 @@ impl AgentTool for ReadFileTool {
Ok(result.into())
} else {
// No line ranges specified, so check file size to see if it's too big.
let buffer_content =
outline::get_buffer_content_or_outline(buffer.clone(), Some(&abs_path), cx)
.await?;
let buffer_content = outline::get_buffer_content_or_outline(
buffer.clone(),
Some(&abs_path.to_string_lossy()),
cx,
)
.await?;

action_log.update(cx, |log, cx| {
log.buffer_read(buffer.clone(), cx);
@@ -593,11 +596,14 @@ mod test {
"**/.secretdir".to_string(),
"**/.mymetadata".to_string(),
]);
settings.project.worktree.private_files = Some(vec![
"**/.mysecrets".to_string(),
"**/*.privatekey".to_string(),
"**/*.mysensitive".to_string(),
]);
settings.project.worktree.private_files = Some(
vec![
"**/.mysecrets".to_string(),
"**/*.privatekey".to_string(),
"**/*.mysensitive".to_string(),
]
.into(),
);
});
});
});
@@ -804,7 +810,8 @@ mod test {
store.update_user_settings(cx, |settings| {
settings.project.worktree.file_scan_exclusions =
Some(vec!["**/.git".to_string(), "**/node_modules".to_string()]);
settings.project.worktree.private_files = Some(vec!["**/.env".to_string()]);
settings.project.worktree.private_files =
Some(vec!["**/.env".to_string()].into());
});
});
});

@@ -82,7 +82,7 @@ impl AgentTool for TerminalTool {
.into(),
}
} else {
"Run terminal command".into()
"".into()
}
}


@@ -44,6 +44,7 @@ pub struct AcpConnection {
pub struct AcpSession {
thread: WeakEntity<AcpThread>,
suppress_abort_err: bool,
models: Option<Rc<RefCell<acp::SessionModelState>>>,
session_modes: Option<Rc<RefCell<acp::SessionModeState>>>,
}

@@ -264,6 +265,7 @@ impl AgentConnection for AcpConnection {
})?;

let modes = response.modes.map(|modes| Rc::new(RefCell::new(modes)));
let models = response.models.map(|models| Rc::new(RefCell::new(models)));

if let Some(default_mode) = default_mode {
if let Some(modes) = modes.as_ref() {
@@ -326,10 +328,12 @@ impl AgentConnection for AcpConnection {
)
})?;


let session = AcpSession {
thread: thread.downgrade(),
suppress_abort_err: false,
session_modes: modes
session_modes: modes,
models,
};
sessions.borrow_mut().insert(session_id, session);

@@ -450,6 +454,27 @@ impl AgentConnection for AcpConnection {
}
}

fn model_selector(
&self,
session_id: &acp::SessionId,
) -> Option<Rc<dyn acp_thread::AgentModelSelector>> {
let sessions = self.sessions.clone();
let sessions_ref = sessions.borrow();
let Some(session) = sessions_ref.get(session_id) else {
return None;
};

if let Some(models) = session.models.as_ref() {
Some(Rc::new(AcpModelSelector::new(
session_id.clone(),
self.connection.clone(),
models.clone(),
)) as _)
} else {
None
}
}

fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
self
}
@@ -500,6 +525,82 @@ impl acp_thread::AgentSessionModes for AcpSessionModes {
}
}

struct AcpModelSelector {
session_id: acp::SessionId,
connection: Rc<acp::ClientSideConnection>,
state: Rc<RefCell<acp::SessionModelState>>,
}

impl AcpModelSelector {
fn new(
session_id: acp::SessionId,
connection: Rc<acp::ClientSideConnection>,
state: Rc<RefCell<acp::SessionModelState>>,
) -> Self {
Self {
session_id,
connection,
state,
}
}
}

impl acp_thread::AgentModelSelector for AcpModelSelector {
fn list_models(&self, _cx: &mut App) -> Task<Result<acp_thread::AgentModelList>> {
Task::ready(Ok(acp_thread::AgentModelList::Flat(
self.state
.borrow()
.available_models
.clone()
.into_iter()
.map(acp_thread::AgentModelInfo::from)
.collect(),
)))
}

fn select_model(&self, model_id: acp::ModelId, cx: &mut App) -> Task<Result<()>> {
let connection = self.connection.clone();
let session_id = self.session_id.clone();
let old_model_id;
{
let mut state = self.state.borrow_mut();
old_model_id = state.current_model_id.clone();
state.current_model_id = model_id.clone();
};
let state = self.state.clone();
cx.foreground_executor().spawn(async move {
let result = connection
.set_session_model(acp::SetSessionModelRequest {
session_id,
model_id,
meta: None,
})
.await;

if result.is_err() {
state.borrow_mut().current_model_id = old_model_id;
}

result?;

Ok(())
})
}

fn selected_model(&self, _cx: &mut App) -> Task<Result<acp_thread::AgentModelInfo>> {
let state = self.state.borrow();
Task::ready(
state
.available_models
.iter()
.find(|m| m.model_id == state.current_model_id)
.cloned()
.map(acp_thread::AgentModelInfo::from)
.ok_or_else(|| anyhow::anyhow!("Model not found")),
)
}
}

struct ClientDelegate {
sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
cx: AsyncApp,

@@ -99,6 +99,9 @@ pub fn load_proxy_env(cx: &mut App) -> HashMap<String, String> {

if let Some(no_proxy) = read_no_proxy_from_env() {
env.insert("NO_PROXY".to_owned(), no_proxy);
} else if proxy_url.is_some() {
// We sometimes need local MCP servers that we don't want to proxy
env.insert("NO_PROXY".to_owned(), "localhost,127.0.0.1".to_owned());
}

env

@@ -62,7 +62,7 @@ impl AgentServer for ClaudeCode {
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
let name = self.name();
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().to_string());
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
let is_remote = delegate.project.read(cx).is_via_remote_server();
let store = delegate.store.downgrade();
let extra_env = load_proxy_env(cx);

@@ -67,7 +67,7 @@ impl crate::AgentServer for CustomAgentServer {
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
let name = self.name();
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().to_string());
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
let is_remote = delegate.project.read(cx).is_via_remote_server();
let default_mode = self.default_mode(cx);
let store = delegate.store.downgrade();

@@ -31,7 +31,7 @@ impl AgentServer for Gemini {
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
let name = self.name();
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().to_string());
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
let is_remote = delegate.project.read(cx).is_via_remote_server();
let store = delegate.store.downgrade();
let mut extra_env = load_proxy_env(cx);

@@ -19,6 +19,7 @@ convert_case.workspace = true
fs.workspace = true
gpui.workspace = true
language_model.workspace = true
project.workspace = true
schemars.workspace = true
serde.workspace = true
settings.workspace = true

@@ -5,13 +5,13 @@ use std::sync::Arc;
use collections::IndexMap;
use gpui::{App, Pixels, px};
use language_model::LanguageModel;
use project::DisableAiSettings;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{
DefaultAgentView, DockPosition, LanguageModelParameters, LanguageModelSelection,
NotifyWhenAgentWaiting, Settings, SettingsContent,
};
use util::MergeFrom;

pub use crate::agent_profile::*;

@@ -54,6 +54,10 @@ pub struct AgentSettings {
}

impl AgentSettings {
pub fn enabled(&self, cx: &App) -> bool {
self.enabled && !DisableAiSettings::get_global(cx).disable_ai
}

pub fn temperature_for_model(model: &Arc<dyn LanguageModel>, cx: &App) -> Option<f32> {
let settings = Self::get_global(cx);
for setting in settings.model_parameters.iter().rev() {
@@ -147,7 +151,7 @@ impl Default for AgentProfileId {
}

impl Settings for AgentSettings {
fn from_defaults(content: &settings::SettingsContent, _cx: &mut App) -> Self {
fn from_settings(content: &settings::SettingsContent, _cx: &mut App) -> Self {
let agent = content.agent.clone().unwrap();
Self {
enabled: agent.enabled.unwrap(),
@@ -183,66 +187,6 @@ impl Settings for AgentSettings {
}
}

fn refine(&mut self, content: &settings::SettingsContent, _: &mut App) {
let Some(value) = &content.agent else { return };
self.enabled.merge_from(&value.enabled);
self.button.merge_from(&value.button);
self.dock.merge_from(&value.dock);
self.default_width
.merge_from(&value.default_width.map(Into::into));
self.default_height
.merge_from(&value.default_height.map(Into::into));
self.default_model = value.default_model.clone().or(self.default_model.take());

self.inline_assistant_model = value
.inline_assistant_model
.clone()
.or(self.inline_assistant_model.take());
self.commit_message_model = value
.clone()
.commit_message_model
.or(self.commit_message_model.take());
self.thread_summary_model = value
.clone()
.thread_summary_model
.or(self.thread_summary_model.take());
self.inline_alternatives
.merge_from(&value.inline_alternatives.clone());
self.default_profile
.merge_from(&value.default_profile.clone().map(AgentProfileId));
self.default_view.merge_from(&value.default_view);
self.always_allow_tool_actions
.merge_from(&value.always_allow_tool_actions);
self.notify_when_agent_waiting
.merge_from(&value.notify_when_agent_waiting);
self.play_sound_when_agent_done
.merge_from(&value.play_sound_when_agent_done);
self.stream_edits.merge_from(&value.stream_edits);
self.single_file_review
.merge_from(&value.single_file_review);
self.preferred_completion_mode
.merge_from(&value.preferred_completion_mode.map(Into::into));
self.enable_feedback.merge_from(&value.enable_feedback);
self.expand_edit_card.merge_from(&value.expand_edit_card);
self.expand_terminal_card
.merge_from(&value.expand_terminal_card);
self.use_modifier_to_send
.merge_from(&value.use_modifier_to_send);

self.model_parameters
.extend_from_slice(&value.model_parameters);
self.message_editor_min_lines
.merge_from(&value.message_editor_min_lines);

if let Some(profiles) = value.profiles.as_ref() {
self.profiles.extend(
profiles
.into_iter()
.map(|(id, profile)| (AgentProfileId(id.clone()), profile.clone().into())),
);
}
}

fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut SettingsContent) {
if let Some(b) = vscode
.read_value("chat.agent.enabled")

@@ -80,6 +80,7 @@ serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
settings.workspace = true
shlex.workspace = true
smol.workspace = true
streaming_diff.workspace = true
task.workspace = true

@@ -1,5 +1,6 @@
|
||||
use std::cell::RefCell;
|
||||
use std::ops::Range;
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
@@ -13,7 +14,7 @@ use fuzzy::{StringMatch, StringMatchCandidate};
|
||||
use gpui::{App, Entity, Task, WeakEntity};
|
||||
use language::{Buffer, CodeLabel, HighlightId};
|
||||
use lsp::CompletionContext;
|
||||
use project::lsp_store::CompletionDocumentation;
|
||||
use project::lsp_store::{CompletionDocumentation, SymbolLocation};
|
||||
use project::{
|
||||
Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, Project,
|
||||
ProjectPath, Symbol, WorktreeId,
|
||||
@@ -22,6 +23,7 @@ use prompt_store::PromptStore;
|
||||
use rope::Point;
|
||||
use text::{Anchor, ToPoint as _};
|
||||
use ui::prelude::*;
|
||||
use util::rel_path::RelPath;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::AgentPanel;
|
||||
@@ -187,7 +189,7 @@ impl ContextPickerCompletionProvider {
|
||||
|
||||
pub(crate) fn completion_for_path(
|
||||
project_path: ProjectPath,
|
||||
path_prefix: &str,
|
||||
path_prefix: &RelPath,
|
||||
is_recent: bool,
|
||||
is_directory: bool,
|
||||
source_range: Range<Anchor>,
|
||||
@@ -195,10 +197,12 @@ impl ContextPickerCompletionProvider {
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Option<Completion> {
|
||||
let path_style = project.read(cx).path_style(cx);
|
||||
let (file_name, directory) =
|
||||
crate::context_picker::file_context_picker::extract_file_name_and_directory(
|
||||
&project_path.path,
|
||||
path_prefix,
|
||||
path_style,
|
||||
);
|
||||
|
||||
let label =
|
||||
@@ -250,7 +254,15 @@ impl ContextPickerCompletionProvider {
|
||||
|
||||
let label = CodeLabel::plain(symbol.name.clone(), None);
|
||||
|
||||
let abs_path = project.read(cx).absolute_path(&symbol.path, cx)?;
|
||||
let abs_path = match &symbol.path {
|
||||
SymbolLocation::InProject(project_path) => {
|
||||
project.read(cx).absolute_path(&project_path, cx)?
|
||||
}
|
||||
SymbolLocation::OutsideProject {
|
||||
abs_path,
|
||||
signature: _,
|
||||
} => PathBuf::from(abs_path.as_ref()),
|
||||
};
|
||||
let uri = MentionUri::Symbol {
|
||||
abs_path,
|
||||
name: symbol.name.clone(),
|
||||
|
||||
@@ -47,13 +47,8 @@ use std::{
|
||||
};
|
||||
use text::OffsetRangeExt;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{
|
||||
ActiveTheme, AnyElement, App, ButtonCommon, ButtonLike, ButtonStyle, Color, Element as _,
|
||||
FluentBuilder as _, Icon, IconName, IconSize, InteractiveElement, IntoElement, Label,
|
||||
LabelCommon, LabelSize, ParentElement, Render, SelectableButton, Styled, TextSize, TintColor,
|
||||
Toggleable, Window, div, h_flex,
|
||||
};
|
||||
use util::{ResultExt, debug_panic};
|
||||
use ui::{ButtonLike, TintColor, Toggleable, prelude::*};
|
||||
use util::{ResultExt, debug_panic, rel_path::RelPath};
|
||||
use workspace::{Workspace, notifications::NotifyResultExt as _};
|
||||
use zed_actions::agent::Chat;
|
||||
|
||||
@@ -81,7 +76,7 @@ pub enum MessageEditorEvent {
|
||||
|
||||
impl EventEmitter<MessageEditorEvent> for MessageEditor {}
|
||||
|
||||
const COMMAND_HINT_INLAY_ID: usize = 0;
|
||||
const COMMAND_HINT_INLAY_ID: u32 = 0;
|
||||
|
||||
impl MessageEditor {
|
||||
pub fn new(
|
||||
@@ -364,7 +359,7 @@ impl MessageEditor {
|
||||
|
||||
let task = match mention_uri.clone() {
|
||||
MentionUri::Fetch { url } => self.confirm_mention_for_fetch(url, cx),
|
||||
MentionUri::Directory { abs_path } => self.confirm_mention_for_directory(abs_path, cx),
|
||||
MentionUri::Directory { .. } => Task::ready(Ok(Mention::UriOnly)),
|
||||
MentionUri::Thread { id, .. } => self.confirm_mention_for_thread(id, cx),
|
||||
MentionUri::TextThread { path, .. } => self.confirm_mention_for_text_thread(path, cx),
|
||||
MentionUri::File { abs_path } => self.confirm_mention_for_file(abs_path, cx),
|
||||
@@ -457,9 +452,12 @@ impl MessageEditor {
|
||||
.update(cx, |project, cx| project.open_buffer(project_path, cx));
|
||||
cx.spawn(async move |_, cx| {
|
||||
let buffer = buffer.await?;
|
||||
let buffer_content =
|
||||
outline::get_buffer_content_or_outline(buffer.clone(), Some(&abs_path), &cx)
|
||||
.await?;
|
||||
let buffer_content = outline::get_buffer_content_or_outline(
|
||||
buffer.clone(),
|
||||
Some(&abs_path.to_string_lossy()),
|
||||
&cx,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(Mention::Text {
|
||||
content: buffer_content.text,
|
||||
@@ -468,97 +466,6 @@ impl MessageEditor {
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm_mention_for_directory(
|
||||
&mut self,
|
||||
abs_path: PathBuf,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<Mention>> {
|
||||
fn collect_files_in_path(worktree: &Worktree, path: &Path) -> Vec<(Arc<Path>, PathBuf)> {
|
||||
let mut files = Vec::new();
|
||||
|
||||
for entry in worktree.child_entries(path) {
|
||||
if entry.is_dir() {
|
||||
files.extend(collect_files_in_path(worktree, &entry.path));
|
||||
} else if entry.is_file() {
|
||||
files.push((entry.path.clone(), worktree.full_path(&entry.path)));
|
||||
}
|
||||
}
|
||||
|
||||
files
|
||||
}
|
||||
|
||||
let Some(project_path) = self
|
||||
.project
|
||||
.read(cx)
|
||||
.project_path_for_absolute_path(&abs_path, cx)
|
||||
else {
|
||||
return Task::ready(Err(anyhow!("project path not found")));
|
||||
};
|
||||
let Some(entry) = self.project.read(cx).entry_for_path(&project_path, cx) else {
|
||||
return Task::ready(Err(anyhow!("project entry not found")));
|
||||
};
|
||||
let directory_path = entry.path.clone();
|
||||
let worktree_id = project_path.worktree_id;
|
||||
let Some(worktree) = self.project.read(cx).worktree_for_id(worktree_id, cx) else {
|
||||
return Task::ready(Err(anyhow!("worktree not found")));
|
||||
};
|
||||
let project = self.project.clone();
|
||||
cx.spawn(async move |_, cx| {
|
||||
let file_paths = worktree.read_with(cx, |worktree, _cx| {
|
||||
collect_files_in_path(worktree, &directory_path)
|
||||
})?;
|
||||
let descendants_future = cx.update(|cx| {
|
||||
join_all(file_paths.into_iter().map(|(worktree_path, full_path)| {
|
||||
let rel_path = worktree_path
|
||||
.strip_prefix(&directory_path)
|
||||
.log_err()
|
||||
.map_or_else(|| worktree_path.clone(), |rel_path| rel_path.into());
|
||||
|
||||
let open_task = project.update(cx, |project, cx| {
|
||||
project.buffer_store().update(cx, |buffer_store, cx| {
|
||||
let project_path = ProjectPath {
|
||||
worktree_id,
|
||||
path: worktree_path,
|
||||
};
|
||||
buffer_store.open_buffer(project_path, cx)
|
||||
})
|
||||
});
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let buffer = open_task.await.log_err()?;
|
||||
let buffer_content = outline::get_buffer_content_or_outline(
|
||||
buffer.clone(),
|
||||
Some(&full_path),
|
||||
&cx,
|
||||
)
|
||||
.await
|
||||
.ok()?;
|
||||
|
||||
Some((rel_path, full_path, buffer_content.text, buffer))
|
||||
})
|
||||
}))
|
||||
})?;
|
||||
|
||||
let contents = cx
|
||||
.background_spawn(async move {
|
||||
let (contents, tracked_buffers) = descendants_future
|
||||
.await
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.map(|(rel_path, full_path, rope, buffer)| {
|
||||
((rel_path, full_path, rope), buffer)
|
||||
})
|
||||
.unzip();
|
||||
Mention::Text {
|
||||
content: render_directory_contents(contents),
|
||||
tracked_buffers,
|
||||
}
|
||||
})
|
||||
.await;
|
||||
anyhow::Ok(contents)
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm_mention_for_fetch(
|
||||
&mut self,
|
||||
url: url::Url,
|
||||
@@ -776,6 +683,7 @@ impl MessageEditor {
|
||||
|
||||
pub fn contents(
|
||||
&self,
|
||||
full_mention_content: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<(Vec<acp::ContentBlock>, Vec<Entity<Buffer>>)>> {
|
||||
// Check for unsupported slash commands before spawning async task
|
||||
@@ -787,9 +695,12 @@ impl MessageEditor {
|
||||
return Task::ready(Err(err));
|
||||
}
|
||||
|
||||
let contents = self
|
||||
.mention_set
|
||||
.contents(&self.prompt_capabilities.borrow(), cx);
|
||||
let contents = self.mention_set.contents(
|
||||
&self.prompt_capabilities.borrow(),
|
||||
full_mention_content,
|
||||
self.project.clone(),
|
||||
cx,
|
||||
);
|
||||
let editor = self.editor.clone();
|
||||
|
||||
cx.spawn(async move |_, cx| {
|
||||
@@ -1039,6 +950,7 @@ impl MessageEditor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let buffer = self.editor.read(cx).buffer().clone();
|
||||
let Some(buffer) = buffer.read(cx).as_singleton() else {
|
||||
return;
|
||||
@@ -1048,18 +960,15 @@ impl MessageEditor {
|
||||
let Some(entry) = self.project.read(cx).entry_for_path(&path, cx) else {
|
||||
continue;
|
||||
};
|
||||
let Some(abs_path) = self.project.read(cx).absolute_path(&path, cx) else {
|
||||
let Some(worktree) = self.project.read(cx).worktree_for_id(path.worktree_id, cx) else {
|
||||
continue;
|
||||
};
|
||||
let path_prefix = abs_path
|
||||
.file_name()
|
||||
.unwrap_or(path.path.as_os_str())
|
||||
.display()
|
||||
.to_string();
|
||||
let abs_path = worktree.read(cx).absolutize(&path.path);
|
||||
let (file_name, _) =
|
||||
crate::context_picker::file_context_picker::extract_file_name_and_directory(
|
||||
&path.path,
|
||||
&path_prefix,
|
||||
worktree.read(cx).root_name(),
|
||||
path_style,
|
||||
);
|
||||
|
||||
let uri = if entry.is_dir() {
|
||||
@@ -1263,7 +1172,103 @@ impl MessageEditor {
|
||||
}
|
||||
}
|
||||
|
||||
fn render_directory_contents(entries: Vec<(Arc<Path>, PathBuf, String)>) -> String {
|
||||
fn full_mention_for_directory(
|
||||
project: &Entity<Project>,
|
||||
abs_path: &Path,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Mention>> {
|
||||
fn collect_files_in_path(worktree: &Worktree, path: &RelPath) -> Vec<(Arc<RelPath>, String)> {
|
||||
let mut files = Vec::new();
|
||||
|
||||
for entry in worktree.child_entries(path) {
|
||||
if entry.is_dir() {
|
||||
files.extend(collect_files_in_path(worktree, &entry.path));
|
||||
} else if entry.is_file() {
|
||||
files.push((
|
||||
entry.path.clone(),
|
||||
worktree
|
||||
.full_path(&entry.path)
|
||||
.to_string_lossy()
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
files
|
||||
}
|
||||
|
||||
let Some(project_path) = project
|
||||
.read(cx)
|
||||
.project_path_for_absolute_path(&abs_path, cx)
|
||||
else {
|
||||
return Task::ready(Err(anyhow!("project path not found")));
|
||||
};
|
||||
let Some(entry) = project.read(cx).entry_for_path(&project_path, cx) else {
|
||||
return Task::ready(Err(anyhow!("project entry not found")));
|
||||
};
|
||||
let directory_path = entry.path.clone();
|
||||
let worktree_id = project_path.worktree_id;
|
||||
let Some(worktree) = project.read(cx).worktree_for_id(worktree_id, cx) else {
|
||||
return Task::ready(Err(anyhow!("worktree not found")));
|
||||
};
|
||||
let project = project.clone();
|
||||
cx.spawn(async move |cx| {
|
||||
let file_paths = worktree.read_with(cx, |worktree, _cx| {
|
||||
collect_files_in_path(worktree, &directory_path)
|
||||
})?;
|
||||
let descendants_future = cx.update(|cx| {
|
||||
join_all(file_paths.into_iter().map(|(worktree_path, full_path)| {
|
||||
let rel_path = worktree_path
|
||||
.strip_prefix(&directory_path)
|
||||
.log_err()
|
||||
.map_or_else(|| worktree_path.clone(), |rel_path| rel_path.into());
|
||||
|
||||
let open_task = project.update(cx, |project, cx| {
|
||||
project.buffer_store().update(cx, |buffer_store, cx| {
|
||||
let project_path = ProjectPath {
|
||||
worktree_id,
|
||||
path: worktree_path,
|
||||
};
|
||||
buffer_store.open_buffer(project_path, cx)
|
||||
})
|
||||
});
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let buffer = open_task.await.log_err()?;
|
||||
let buffer_content = outline::get_buffer_content_or_outline(
|
||||
buffer.clone(),
|
||||
Some(&full_path),
|
||||
&cx,
|
||||
)
|
||||
.await
|
||||
.ok()?;
|
||||
|
||||
Some((rel_path, full_path, buffer_content.text, buffer))
|
||||
})
|
||||
}))
|
||||
})?;
|
||||
|
||||
let contents = cx
|
||||
.background_spawn(async move {
|
||||
let (contents, tracked_buffers) = descendants_future
|
||||
.await
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.map(|(rel_path, full_path, rope, buffer)| {
|
||||
((rel_path, full_path, rope), buffer)
|
||||
})
|
||||
.unzip();
|
||||
Mention::Text {
|
||||
content: render_directory_contents(contents),
|
||||
tracked_buffers,
|
||||
}
|
||||
})
|
||||
.await;
|
||||
anyhow::Ok(contents)
|
||||
})
|
||||
}
|
||||
|
||||
fn render_directory_contents(entries: Vec<(Arc<RelPath>, String, String)>) -> String {
|
||||
let mut output = String::new();
|
||||
for (_relative_path, full_path, content) in entries {
|
||||
let fence = codeblock_fence_for_path(Some(&full_path), None);
|
||||
@@ -1288,18 +1293,14 @@ impl Render for MessageEditor {
|
||||
.flex_1()
|
||||
.child({
|
||||
let settings = ThemeSettings::get_global(cx);
|
||||
let font_size = TextSize::Small
|
||||
.rems(cx)
|
||||
.to_pixels(settings.agent_font_size(cx));
|
||||
let line_height = settings.buffer_line_height.value() * font_size;
|
||||
|
||||
let text_style = TextStyle {
|
||||
color: cx.theme().colors().text,
|
||||
font_family: settings.buffer_font.family.clone(),
|
||||
font_fallbacks: settings.buffer_font.fallbacks.clone(),
|
||||
font_features: settings.buffer_font.features.clone(),
|
||||
font_size: font_size.into(),
|
||||
line_height: line_height.into(),
|
||||
font_size: settings.buffer_font_size(cx).into(),
|
||||
line_height: relative(settings.buffer_line_height.value()),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
@@ -1514,6 +1515,8 @@ impl MentionSet {
|
||||
fn contents(
|
||||
&self,
|
||||
prompt_capabilities: &acp::PromptCapabilities,
|
||||
full_mention_content: bool,
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<HashMap<CreaseId, (MentionUri, Mention)>>> {
|
||||
if !prompt_capabilities.embedded_context {
|
||||
@@ -1527,13 +1530,19 @@ impl MentionSet {
|
||||
}
|
||||
|
||||
let mentions = self.mentions.clone();
|
||||
cx.spawn(async move |_cx| {
|
||||
cx.spawn(async move |cx| {
|
||||
let mut contents = HashMap::default();
|
||||
for (crease_id, (mention_uri, task)) in mentions {
|
||||
contents.insert(
|
||||
crease_id,
|
||||
(mention_uri, task.await.map_err(|e| anyhow!("{e}"))?),
|
||||
);
|
||||
let content = if full_mention_content
|
||||
&& let MentionUri::Directory { abs_path } = &mention_uri
|
||||
{
|
||||
cx.update(|cx| full_mention_for_directory(&project, abs_path, cx))?
|
||||
.await?
|
||||
} else {
|
||||
task.await.map_err(|e| anyhow!("{e}"))?
|
||||
};
|
||||
|
||||
contents.insert(crease_id, (mention_uri, content));
|
||||
}
|
||||
Ok(contents)
|
||||
})
|
||||
@@ -1593,7 +1602,7 @@ mod tests {
|
||||
use serde_json::json;
|
||||
use text::Point;
|
||||
use ui::{App, Context, IntoElement, Render, SharedString, Window};
|
||||
use util::{path, uri};
|
||||
use util::{path, paths::PathStyle, rel_path::rel_path};
|
||||
use workspace::{AppState, Item, Workspace};
|
||||
|
||||
use crate::acp::{
|
||||
@@ -1694,7 +1703,7 @@ mod tests {
|
||||
});
|
||||
|
||||
let (content, _) = message_editor
|
||||
.update(cx, |message_editor, cx| message_editor.contents(cx))
|
||||
.update(cx, |message_editor, cx| message_editor.contents(false, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -1757,7 +1766,7 @@ mod tests {
|
||||
});
|
||||
|
||||
let contents_result = message_editor
|
||||
.update(cx, |message_editor, cx| message_editor.contents(cx))
|
||||
.update(cx, |message_editor, cx| message_editor.contents(false, cx))
|
||||
.await;
|
||||
|
||||
// Should fail because available_commands is empty (no commands supported)
|
||||
@@ -1780,7 +1789,7 @@ mod tests {
|
||||
});
|
||||
|
||||
let contents_result = message_editor
|
||||
.update(cx, |message_editor, cx| message_editor.contents(cx))
|
||||
.update(cx, |message_editor, cx| message_editor.contents(false, cx))
|
||||
.await;
|
||||
|
||||
assert!(contents_result.is_err());
|
||||
@@ -1795,7 +1804,7 @@ mod tests {
|
||||
});
|
||||
|
||||
let contents_result = message_editor
|
||||
.update(cx, |message_editor, cx| message_editor.contents(cx))
|
||||
.update(cx, |message_editor, cx| message_editor.contents(false, cx))
|
||||
.await;
|
||||
|
||||
// Should succeed because /help is in available_commands
|
||||
@@ -1807,7 +1816,7 @@ mod tests {
|
||||
});
|
||||
|
||||
let (content, _) = message_editor
|
||||
.update(cx, |message_editor, cx| message_editor.contents(cx))
|
||||
.update(cx, |message_editor, cx| message_editor.contents(false, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -1825,7 +1834,7 @@ mod tests {
|
||||
|
||||
// The @ mention functionality should not be affected
|
||||
let (content, _) = message_editor
|
||||
.update(cx, |message_editor, cx| message_editor.contents(cx))
|
||||
.update(cx, |message_editor, cx| message_editor.contents(false, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -2103,16 +2112,18 @@ mod tests {
|
||||
let mut cx = VisualTestContext::from_window(*window, cx);
|
||||
|
||||
let paths = vec![
|
||||
path!("a/one.txt"),
|
||||
path!("a/two.txt"),
|
||||
path!("a/three.txt"),
|
||||
path!("a/four.txt"),
|
||||
path!("b/five.txt"),
|
||||
path!("b/six.txt"),
|
||||
path!("b/seven.txt"),
|
||||
path!("b/eight.txt"),
|
||||
rel_path("a/one.txt"),
|
||||
rel_path("a/two.txt"),
|
||||
rel_path("a/three.txt"),
|
||||
rel_path("a/four.txt"),
|
||||
rel_path("b/five.txt"),
|
||||
rel_path("b/six.txt"),
|
||||
rel_path("b/seven.txt"),
|
||||
rel_path("b/eight.txt"),
|
||||
];
|
||||
|
||||
let slash = PathStyle::local().separator();
|
||||
|
||||
let mut opened_editors = Vec::new();
|
||||
for path in paths {
|
||||
let buffer = workspace
|
||||
@@ -2120,7 +2131,7 @@ mod tests {
|
||||
workspace.open_path(
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: Path::new(path).into(),
|
||||
path: path.into(),
|
||||
},
|
||||
None,
|
||||
false,
|
||||
@@ -2181,10 +2192,10 @@ mod tests {
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
&[
|
||||
"eight.txt dir/b/",
|
||||
"seven.txt dir/b/",
|
||||
"six.txt dir/b/",
|
||||
"five.txt dir/b/",
|
||||
format!("eight.txt dir{slash}b{slash}"),
|
||||
format!("seven.txt dir{slash}b{slash}"),
|
||||
format!("six.txt dir{slash}b{slash}"),
|
||||
format!("five.txt dir{slash}b{slash}"),
|
||||
]
|
||||
);
|
||||
editor.set_text("", window, cx);
|
||||
@@ -2212,14 +2223,14 @@ mod tests {
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
&[
|
||||
"eight.txt dir/b/",
|
||||
"seven.txt dir/b/",
|
||||
"six.txt dir/b/",
|
||||
"five.txt dir/b/",
|
||||
"Files & Directories",
|
||||
"Symbols",
|
||||
"Threads",
|
||||
"Fetch"
|
||||
format!("eight.txt dir{slash}b{slash}"),
|
||||
format!("seven.txt dir{slash}b{slash}"),
|
||||
format!("six.txt dir{slash}b{slash}"),
|
||||
format!("five.txt dir{slash}b{slash}"),
|
||||
"Files & Directories".into(),
|
||||
"Symbols".into(),
|
||||
"Threads".into(),
|
||||
"Fetch".into()
|
||||
]
|
||||
);
|
||||
});
|
||||
@@ -2246,7 +2257,10 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(editor.text(cx), "Lorem @file one");
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(current_completion_labels(editor), vec!["one.txt dir/a/"]);
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
vec![format!("one.txt dir{slash}a{slash}")]
|
||||
);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
@@ -2254,7 +2268,11 @@ mod tests {
|
||||
editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx);
|
||||
});
|
||||
|
||||
let url_one = uri!("file:///dir/a/one.txt");
|
||||
let url_one = MentionUri::File {
|
||||
abs_path: path!("/dir/a/one.txt").into(),
|
||||
}
|
||||
.to_uri()
|
||||
.to_string();
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
let text = editor.text(cx);
|
||||
assert_eq!(text, format!("Lorem [@one.txt]({url_one}) "));
|
||||
@@ -2271,9 +2289,12 @@ mod tests {
|
||||
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(&all_prompt_capabilities, cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
@@ -2290,9 +2311,12 @@ mod tests {
|
||||
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(&acp::PromptCapabilities::default(), cx)
|
||||
message_editor.mention_set().contents(
|
||||
&acp::PromptCapabilities::default(),
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
@@ -2341,16 +2365,23 @@ mod tests {
|
||||
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(&all_prompt_capabilities, cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.into_values()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let url_eight = uri!("file:///dir/b/eight.txt");
|
||||
let url_eight = MentionUri::File {
|
||||
abs_path: path!("/dir/b/eight.txt").into(),
|
||||
}
|
||||
.to_uri()
|
||||
.to_string();
|
||||
|
||||
{
|
||||
let [_, (uri, Mention::Text { content, .. })] = contents.as_slice() else {
|
||||
@@ -2449,11 +2480,20 @@ mod tests {
|
||||
editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx);
|
||||
});
|
||||
|
||||
let symbol = MentionUri::Symbol {
|
||||
abs_path: path!("/dir/a/one.txt").into(),
|
||||
name: "MySymbol".into(),
|
||||
line_range: 0..=0,
|
||||
};
|
||||
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(&all_prompt_capabilities, cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
@@ -2465,12 +2505,7 @@ mod tests {
|
||||
panic!("Unexpected mentions");
|
||||
};
|
||||
pretty_assertions::assert_eq!(content, "1");
|
||||
pretty_assertions::assert_eq!(
|
||||
uri,
|
||||
&format!("{url_one}?symbol=MySymbol#L1:1")
|
||||
.parse::<MentionUri>()
|
||||
.unwrap()
|
||||
);
|
||||
pretty_assertions::assert_eq!(uri, &symbol);
|
||||
}
|
||||
|
||||
cx.run_until_parked();
|
||||
@@ -2478,7 +2513,10 @@ mod tests {
|
||||
editor.read_with(&cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) ")
|
||||
format!(
|
||||
"Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) ",
|
||||
symbol.to_uri(),
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
@@ -2488,10 +2526,10 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) @file x.png")
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) @file x.png", symbol.to_uri())
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(current_completion_labels(editor), &["x.png dir/"]);
|
||||
assert_eq!(current_completion_labels(editor), &[format!("x.png dir{slash}")]);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
@@ -2501,9 +2539,12 @@ mod tests {
|
||||
// Getting the message contents fails
|
||||
message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(&all_prompt_capabilities, cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.expect_err("Should fail to load x.png");
|
||||
@@ -2514,7 +2555,10 @@ mod tests {
|
||||
editor.read_with(&cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) ")
|
||||
format!(
|
||||
"Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) ",
|
||||
symbol.to_uri()
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
@@ -2524,10 +2568,10 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) @file x.png")
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) @file x.png", symbol.to_uri())
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(current_completion_labels(editor), &["x.png dir/"]);
|
||||
assert_eq!(current_completion_labels(editor), &[format!("x.png dir{slash}")]);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
@@ -2539,18 +2583,24 @@ mod tests {
|
||||
|
||||
// Mention was removed
|
||||
editor.read_with(&cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) ")
|
||||
);
|
||||
});
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!(
|
||||
"Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) ",
|
||||
symbol.to_uri()
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
// Now getting the contents succeeds, because the invalid mention was removed
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(&all_prompt_capabilities, cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use std::{cmp::Reverse, rc::Rc, sync::Arc};
|
||||
|
||||
use acp_thread::{AgentModelInfo, AgentModelList, AgentModelSelector};
|
||||
use agent_client_protocol as acp;
|
||||
use anyhow::Result;
|
||||
use collections::IndexMap;
|
||||
use futures::FutureExt;
|
||||
@@ -10,20 +9,19 @@ use gpui::{Action, AsyncWindowContext, BackgroundExecutor, DismissEvent, Task, W
|
||||
use ordered_float::OrderedFloat;
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use ui::{
|
||||
AnyElement, App, Context, IntoElement, ListItem, ListItemSpacing, SharedString, Window,
|
||||
prelude::*, rems,
|
||||
AnyElement, App, Context, DocumentationAside, DocumentationEdge, DocumentationSide,
|
||||
IntoElement, ListItem, ListItemSpacing, SharedString, Window, prelude::*, rems,
|
||||
};
|
||||
use util::ResultExt;
|
||||
|
||||
pub type AcpModelSelector = Picker<AcpModelPickerDelegate>;
|
||||
|
||||
pub fn acp_model_selector(
|
||||
session_id: acp::SessionId,
|
||||
selector: Rc<dyn AgentModelSelector>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<AcpModelSelector>,
|
||||
) -> AcpModelSelector {
|
||||
let delegate = AcpModelPickerDelegate::new(session_id, selector, window, cx);
|
||||
let delegate = AcpModelPickerDelegate::new(selector, window, cx);
|
||||
Picker::list(delegate, window, cx)
|
||||
.show_scrollbar(true)
|
||||
.width(rems(20.))
|
||||
@@ -36,61 +34,63 @@ enum AcpModelPickerEntry {
|
||||
}
|
||||
|
||||
pub struct AcpModelPickerDelegate {
|
||||
session_id: acp::SessionId,
|
||||
selector: Rc<dyn AgentModelSelector>,
|
||||
filtered_entries: Vec<AcpModelPickerEntry>,
|
||||
models: Option<AgentModelList>,
|
||||
selected_index: usize,
|
||||
selected_description: Option<(usize, SharedString)>,
|
||||
selected_model: Option<AgentModelInfo>,
|
||||
_refresh_models_task: Task<()>,
|
||||
}
|
||||
|
||||
impl AcpModelPickerDelegate {
|
||||
fn new(
|
||||
session_id: acp::SessionId,
|
||||
selector: Rc<dyn AgentModelSelector>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<AcpModelSelector>,
|
||||
) -> Self {
|
||||
let mut rx = selector.watch(cx);
|
||||
let refresh_models_task = cx.spawn_in(window, {
|
||||
let session_id = session_id.clone();
|
||||
async move |this, cx| {
|
||||
async fn refresh(
|
||||
this: &WeakEntity<Picker<AcpModelPickerDelegate>>,
|
||||
session_id: &acp::SessionId,
|
||||
cx: &mut AsyncWindowContext,
|
||||
) -> Result<()> {
|
||||
let (models_task, selected_model_task) = this.update(cx, |this, cx| {
|
||||
(
|
||||
this.delegate.selector.list_models(cx),
|
||||
this.delegate.selector.selected_model(session_id, cx),
|
||||
)
|
||||
})?;
|
||||
let rx = selector.watch(cx);
|
||||
let refresh_models_task = {
|
||||
cx.spawn_in(window, {
|
||||
async move |this, cx| {
|
||||
async fn refresh(
|
||||
this: &WeakEntity<Picker<AcpModelPickerDelegate>>,
|
||||
cx: &mut AsyncWindowContext,
|
||||
) -> Result<()> {
|
||||
let (models_task, selected_model_task) = this.update(cx, |this, cx| {
|
||||
(
|
||||
this.delegate.selector.list_models(cx),
|
||||
this.delegate.selector.selected_model(cx),
|
||||
)
|
||||
})?;
|
||||
|
||||
let (models, selected_model) = futures::join!(models_task, selected_model_task);
|
||||
let (models, selected_model) =
|
||||
futures::join!(models_task, selected_model_task);
|
||||
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.delegate.models = models.ok();
|
||||
this.delegate.selected_model = selected_model.ok();
|
||||
this.refresh(window, cx)
|
||||
})
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.delegate.models = models.ok();
|
||||
this.delegate.selected_model = selected_model.ok();
|
||||
this.refresh(window, cx)
|
||||
})
|
||||
}
|
||||
|
||||
refresh(&this, cx).await.log_err();
|
||||
if let Some(mut rx) = rx {
|
||||
while let Ok(()) = rx.recv().await {
|
||||
refresh(&this, cx).await.log_err();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
refresh(&this, &session_id, cx).await.log_err();
|
||||
while let Ok(()) = rx.recv().await {
|
||||
refresh(&this, &session_id, cx).await.log_err();
|
||||
}
|
||||
}
|
||||
});
|
||||
})
|
||||
};
|
||||
|
||||
Self {
|
||||
session_id,
|
||||
selector,
|
||||
filtered_entries: Vec::new(),
|
||||
models: None,
|
||||
selected_model: None,
|
||||
selected_index: 0,
|
||||
selected_description: None,
|
||||
_refresh_models_task: refresh_models_task,
|
||||
}
|
||||
}
|
||||
@@ -182,7 +182,7 @@ impl PickerDelegate for AcpModelPickerDelegate {
|
||||
self.filtered_entries.get(self.selected_index)
|
||||
{
|
||||
self.selector
|
||||
.select_model(self.session_id.clone(), model_info.id.clone(), cx)
|
||||
.select_model(model_info.id.clone(), cx)
|
||||
.detach_and_log_err(cx);
|
||||
self.selected_model = Some(model_info.clone());
|
||||
let current_index = self.selected_index;
|
||||
@@ -233,31 +233,46 @@ impl PickerDelegate for AcpModelPickerDelegate {
|
||||
};
|
||||
|
||||
Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.toggle_state(selected)
|
||||
.start_slot::<Icon>(model_info.icon.map(|icon| {
|
||||
Icon::new(icon)
|
||||
.color(model_icon_color)
|
||||
.size(IconSize::Small)
|
||||
}))
|
||||
div()
|
||||
.id(("model-picker-menu-child", ix))
|
||||
.when_some(model_info.description.clone(), |this, description| {
|
||||
this
|
||||
.on_hover(cx.listener(move |menu, hovered, _, cx| {
|
||||
if *hovered {
|
||||
menu.delegate.selected_description = Some((ix, description.clone()));
|
||||
} else if matches!(menu.delegate.selected_description, Some((id, _)) if id == ix) {
|
||||
menu.delegate.selected_description = None;
|
||||
}
|
||||
cx.notify();
|
||||
}))
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.pl_0p5()
|
||||
.gap_1p5()
|
||||
.w(px(240.))
|
||||
.child(Label::new(model_info.name.clone()).truncate()),
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.toggle_state(selected)
|
||||
.start_slot::<Icon>(model_info.icon.map(|icon| {
|
||||
Icon::new(icon)
|
||||
.color(model_icon_color)
|
||||
.size(IconSize::Small)
|
||||
}))
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.pl_0p5()
|
||||
.gap_1p5()
|
||||
.w(px(240.))
|
||||
.child(Label::new(model_info.name.clone()).truncate()),
|
||||
)
|
||||
.end_slot(div().pr_3().when(is_selected, |this| {
|
||||
this.child(
|
||||
Icon::new(IconName::Check)
|
||||
.color(Color::Accent)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
})),
|
||||
)
|
||||
.end_slot(div().pr_3().when(is_selected, |this| {
|
||||
this.child(
|
||||
Icon::new(IconName::Check)
|
||||
.color(Color::Accent)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
}))
|
||||
.into_any_element(),
|
||||
.into_any_element()
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -292,6 +307,21 @@ impl PickerDelegate for AcpModelPickerDelegate {
|
||||
.into_any(),
|
||||
)
|
||||
}
|
||||
|
||||
fn documentation_aside(
|
||||
&self,
|
||||
_window: &mut Window,
|
||||
_cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<ui::DocumentationAside> {
|
||||
self.selected_description.as_ref().map(|(_, description)| {
|
||||
let description = description.clone();
|
||||
DocumentationAside::new(
|
||||
DocumentationSide::Left,
|
||||
DocumentationEdge::Bottom,
|
||||
Rc::new(move |_| Label::new(description.clone()).into_any_element()),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn info_list_to_picker_entries(
|
||||
@@ -371,6 +401,7 @@ async fn fuzzy_search(
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use agent_client_protocol as acp;
|
||||
use gpui::TestAppContext;
|
||||
|
||||
use super::*;
|
||||
@@ -383,8 +414,9 @@ mod tests {
|
||||
models
|
||||
.into_iter()
|
||||
.map(|model| acp_thread::AgentModelInfo {
|
||||
id: acp_thread::AgentModelId(model.to_string().into()),
|
||||
id: acp::ModelId(model.to_string().into()),
|
||||
name: model.to_string().into(),
|
||||
description: None,
|
||||
icon: None,
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use acp_thread::AgentModelSelector;
|
||||
use agent_client_protocol as acp;
|
||||
use gpui::{Entity, FocusHandle};
|
||||
use picker::popover_menu::PickerPopoverMenu;
|
||||
use ui::{
|
||||
@@ -20,7 +19,6 @@ pub struct AcpModelSelectorPopover {
|
||||
|
||||
impl AcpModelSelectorPopover {
|
||||
pub(crate) fn new(
|
||||
session_id: acp::SessionId,
|
||||
selector: Rc<dyn AgentModelSelector>,
|
||||
menu_handle: PopoverMenuHandle<AcpModelSelector>,
|
||||
focus_handle: FocusHandle,
|
||||
@@ -28,7 +26,7 @@ impl AcpModelSelectorPopover {
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
Self {
|
||||
selector: cx.new(move |cx| acp_model_selector(session_id, selector, window, cx)),
|
||||
selector: cx.new(move |cx| acp_model_selector(selector, window, cx)),
|
||||
menu_handle,
|
||||
focus_handle,
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ use agent_client_protocol::{self as acp, PromptCapabilities};
|
||||
use agent_servers::{AgentServer, AgentServerDelegate};
|
||||
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode};
|
||||
use agent2::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore, NativeAgentServer};
|
||||
use anyhow::{Result, anyhow, bail};
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use arrayvec::ArrayVec;
|
||||
use audio::{Audio, Sound};
|
||||
use buffer_diff::BufferDiff;
|
||||
@@ -577,23 +577,21 @@ impl AcpThreadView {
|
||||
|
||||
AgentDiff::set_active_thread(&workspace, thread.clone(), window, cx);
|
||||
|
||||
this.model_selector =
|
||||
thread
|
||||
.read(cx)
|
||||
.connection()
|
||||
.model_selector()
|
||||
.map(|selector| {
|
||||
cx.new(|cx| {
|
||||
AcpModelSelectorPopover::new(
|
||||
thread.read(cx).session_id().clone(),
|
||||
selector,
|
||||
PopoverMenuHandle::default(),
|
||||
this.focus_handle(cx),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
});
|
||||
this.model_selector = thread
|
||||
.read(cx)
|
||||
.connection()
|
||||
.model_selector(thread.read(cx).session_id())
|
||||
.map(|selector| {
|
||||
cx.new(|cx| {
|
||||
AcpModelSelectorPopover::new(
|
||||
selector,
|
||||
PopoverMenuHandle::default(),
|
||||
this.focus_handle(cx),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
});
|
||||
|
||||
let mode_selector = thread
|
||||
.read(cx)
|
||||
@@ -1040,10 +1038,7 @@ impl AcpThreadView {
|
||||
return;
|
||||
}
|
||||
|
||||
let contents = self
|
||||
.message_editor
|
||||
.update(cx, |message_editor, cx| message_editor.contents(cx));
|
||||
self.send_impl(contents, window, cx)
|
||||
self.send_impl(self.message_editor.clone(), window, cx)
|
||||
}
|
||||
|
||||
fn stop_current_and_send_new_message(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
@@ -1053,15 +1048,11 @@ impl AcpThreadView {
|
||||
|
||||
let cancelled = thread.update(cx, |thread, cx| thread.cancel(cx));
|
||||
|
||||
let contents = self
|
||||
.message_editor
|
||||
.update(cx, |message_editor, cx| message_editor.contents(cx));
|
||||
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
cancelled.await;
|
||||
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.send_impl(contents, window, cx);
|
||||
this.send_impl(this.message_editor.clone(), window, cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
@@ -1070,10 +1061,23 @@ impl AcpThreadView {
|
||||
|
||||
fn send_impl(
|
||||
&mut self,
|
||||
contents: Task<Result<(Vec<acp::ContentBlock>, Vec<Entity<Buffer>>)>>,
|
||||
message_editor: Entity<MessageEditor>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let full_mention_content = self.as_native_thread(cx).is_some_and(|thread| {
|
||||
// Include full contents when using minimal profile
|
||||
let thread = thread.read(cx);
|
||||
AgentSettings::get_global(cx)
|
||||
.profiles
|
||||
.get(thread.profile())
|
||||
.is_some_and(|profile| profile.tools.is_empty())
|
||||
});
|
||||
|
||||
let contents = message_editor.update(cx, |message_editor, cx| {
|
||||
message_editor.contents(full_mention_content, cx)
|
||||
});
|
||||
|
||||
let agent_telemetry_id = self.agent.telemetry_id();
|
||||
|
||||
self.thread_error.take();
|
||||
@@ -1202,10 +1206,8 @@ impl AcpThreadView {
|
||||
thread
|
||||
.update(cx, |thread, cx| thread.rewind(user_message_id, cx))?
|
||||
.await?;
|
||||
let contents =
|
||||
message_editor.update(cx, |message_editor, cx| message_editor.contents(cx))?;
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.send_impl(contents, window, cx);
|
||||
this.send_impl(message_editor, window, cx);
|
||||
})?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
@@ -1582,6 +1584,19 @@ impl AcpThreadView {
|
||||
|
||||
window.spawn(cx, async move |cx| {
|
||||
let mut task = login.clone();
|
||||
task.command = task
|
||||
.command
|
||||
.map(|command| anyhow::Ok(shlex::try_quote(&command)?.to_string()))
|
||||
.transpose()?;
|
||||
task.args = task
|
||||
.args
|
||||
.iter()
|
||||
.map(|arg| {
|
||||
Ok(shlex::try_quote(arg)
|
||||
.context("Failed to quote argument")?
|
||||
.to_string())
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
task.full_label = task.label.clone();
|
||||
task.id = task::TaskId(format!("external-agent-{}-login", task.label));
|
||||
task.command_label = task.label.clone();
|
||||
@@ -1591,7 +1606,7 @@ impl AcpThreadView {
|
||||
task.shell = shell;
|
||||
|
||||
let terminal = terminal_panel.update_in(cx, |terminal_panel, window, cx| {
|
||||
terminal_panel.spawn_task(&login, window, cx)
|
||||
terminal_panel.spawn_task(&task, window, cx)
|
||||
})?;
|
||||
|
||||
let terminal = terminal.await?;
|
||||
@@ -2064,27 +2079,6 @@ impl AcpThreadView {
|
||||
let has_location = tool_call.locations.len() == 1;
|
||||
let card_header_id = SharedString::from("inner-tool-call-header");
|
||||
|
||||
let tool_icon = if tool_call.kind == acp::ToolKind::Edit && has_location {
|
||||
FileIcons::get_icon(&tool_call.locations[0].path, cx)
|
||||
.map(Icon::from_path)
|
||||
.unwrap_or(Icon::new(IconName::ToolPencil))
|
||||
} else {
|
||||
Icon::new(match tool_call.kind {
|
||||
acp::ToolKind::Read => IconName::ToolSearch,
|
||||
acp::ToolKind::Edit => IconName::ToolPencil,
|
||||
acp::ToolKind::Delete => IconName::ToolDeleteFile,
|
||||
acp::ToolKind::Move => IconName::ArrowRightLeft,
|
||||
acp::ToolKind::Search => IconName::ToolSearch,
|
||||
acp::ToolKind::Execute => IconName::ToolTerminal,
|
||||
acp::ToolKind::Think => IconName::ToolThink,
|
||||
acp::ToolKind::Fetch => IconName::ToolWeb,
|
||||
acp::ToolKind::SwitchMode => IconName::ArrowRightLeft,
|
||||
acp::ToolKind::Other => IconName::ToolHammer,
|
||||
})
|
||||
}
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted);
|
||||
|
||||
let failed_or_canceled = match &tool_call.status {
|
||||
ToolCallStatus::Rejected | ToolCallStatus::Canceled | ToolCallStatus::Failed => true,
|
||||
_ => false,
|
||||
@@ -2094,41 +2088,16 @@ impl AcpThreadView {
|
||||
tool_call.status,
|
||||
ToolCallStatus::WaitingForConfirmation { .. }
|
||||
);
|
||||
let is_terminal_tool = matches!(tool_call.kind, acp::ToolKind::Execute);
|
||||
let is_edit =
|
||||
matches!(tool_call.kind, acp::ToolKind::Edit) || tool_call.diffs().next().is_some();
|
||||
let use_card_layout = needs_confirmation || is_edit;
|
||||
|
||||
let use_card_layout = needs_confirmation || is_edit || is_terminal_tool;
|
||||
|
||||
let is_collapsible = !tool_call.content.is_empty() && !needs_confirmation;
|
||||
|
||||
let is_open = needs_confirmation || self.expanded_tool_calls.contains(&tool_call.id);
|
||||
|
||||
let gradient_overlay = {
|
||||
div()
|
||||
.absolute()
|
||||
.top_0()
|
||||
.right_0()
|
||||
.w_12()
|
||||
.h_full()
|
||||
.map(|this| {
|
||||
if use_card_layout {
|
||||
this.bg(linear_gradient(
|
||||
90.,
|
||||
linear_color_stop(self.tool_card_header_bg(cx), 1.),
|
||||
linear_color_stop(self.tool_card_header_bg(cx).opacity(0.2), 0.),
|
||||
))
|
||||
} else {
|
||||
this.bg(linear_gradient(
|
||||
90.,
|
||||
linear_color_stop(cx.theme().colors().panel_background, 1.),
|
||||
linear_color_stop(
|
||||
cx.theme().colors().panel_background.opacity(0.2),
|
||||
0.,
|
||||
),
|
||||
))
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
let tool_output_display =
|
||||
if is_open {
|
||||
match &tool_call.status {
|
||||
@@ -2213,104 +2182,202 @@ impl AcpThreadView {
|
||||
}
|
||||
})
|
||||
.mr_5()
|
||||
.child(
|
||||
h_flex()
|
||||
.group(&card_header_id)
|
||||
.relative()
|
||||
.w_full()
|
||||
.gap_1()
|
||||
.justify_between()
|
||||
.when(use_card_layout, |this| {
|
||||
this.p_0p5()
|
||||
.rounded_t(rems_from_px(5.))
|
||||
.map(|this| {
|
||||
if is_terminal_tool {
|
||||
this.child(
|
||||
v_flex()
|
||||
.p_1p5()
|
||||
.gap_0p5()
|
||||
.text_ui_sm(cx)
|
||||
.bg(self.tool_card_header_bg(cx))
|
||||
})
|
||||
.child(
|
||||
.child(
|
||||
Label::new("Run Command")
|
||||
.buffer_font(cx)
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(
|
||||
MarkdownElement::new(
|
||||
tool_call.label.clone(),
|
||||
terminal_command_markdown_style(window, cx),
|
||||
)
|
||||
.code_block_renderer(
|
||||
markdown::CodeBlockRenderer::Default {
|
||||
copy_button: false,
|
||||
copy_button_on_hover: false,
|
||||
border: false,
|
||||
},
|
||||
)
|
||||
),
|
||||
)
|
||||
} else {
|
||||
this.child(
|
||||
h_flex()
|
||||
.group(&card_header_id)
|
||||
.relative()
|
||||
.w_full()
|
||||
.h(window.line_height() - px(2.))
|
||||
.text_size(self.tool_name_font_size())
|
||||
.gap_1p5()
|
||||
.when(has_location || use_card_layout, |this| this.px_1())
|
||||
.when(has_location, |this| {
|
||||
this.cursor(CursorStyle::PointingHand)
|
||||
.rounded(rems_from_px(3.)) // Concentric border radius
|
||||
.hover(|s| s.bg(cx.theme().colors().element_hover.opacity(0.5)))
|
||||
.gap_1()
|
||||
.justify_between()
|
||||
.when(use_card_layout, |this| {
|
||||
this.p_0p5()
|
||||
.rounded_t(rems_from_px(5.))
|
||||
.bg(self.tool_card_header_bg(cx))
|
||||
})
|
||||
.overflow_hidden()
|
||||
.child(tool_icon)
|
||||
.child(if has_location {
|
||||
h_flex()
|
||||
.id(("open-tool-call-location", entry_ix))
|
||||
.w_full()
|
||||
.map(|this| {
|
||||
if use_card_layout {
|
||||
this.text_color(cx.theme().colors().text)
|
||||
} else {
|
||||
this.text_color(cx.theme().colors().text_muted)
|
||||
}
|
||||
})
|
||||
.child(self.render_markdown(
|
||||
tool_call.label.clone(),
|
||||
MarkdownStyle {
|
||||
prevent_mouse_interaction: true,
|
||||
..default_markdown_style(false, true, window, cx)
|
||||
},
|
||||
))
|
||||
.tooltip(Tooltip::text("Jump to File"))
|
||||
.on_click(cx.listener(move |this, _, window, cx| {
|
||||
this.open_tool_call_location(entry_ix, 0, window, cx);
|
||||
}))
|
||||
.into_any_element()
|
||||
} else {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.child(self.render_markdown(
|
||||
tool_call.label.clone(),
|
||||
default_markdown_style(false, true, window, cx),
|
||||
))
|
||||
.into_any()
|
||||
})
|
||||
.when(!has_location, |this| this.child(gradient_overlay)),
|
||||
)
|
||||
.when(is_collapsible || failed_or_canceled, |this| {
|
||||
this.child(
|
||||
h_flex()
|
||||
.px_1()
|
||||
.gap_px()
|
||||
.when(is_collapsible, |this| {
|
||||
this.child(
|
||||
Disclosure::new(("expand", entry_ix), is_open)
|
||||
.opened_icon(IconName::ChevronUp)
|
||||
.closed_icon(IconName::ChevronDown)
|
||||
.visible_on_hover(&card_header_id)
|
||||
.on_click(cx.listener({
|
||||
let id = tool_call.id.clone();
|
||||
move |this: &mut Self, _, _, cx: &mut Context<Self>| {
|
||||
if is_open {
|
||||
this.expanded_tool_calls.remove(&id);
|
||||
} else {
|
||||
this.expanded_tool_calls.insert(id.clone());
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
})),
|
||||
.child(self.render_tool_call_label(
|
||||
entry_ix,
|
||||
tool_call,
|
||||
is_edit,
|
||||
use_card_layout,
|
||||
window,
|
||||
cx,
|
||||
))
|
||||
.when(is_collapsible || failed_or_canceled, |this| {
|
||||
this.child(
|
||||
h_flex()
|
||||
.px_1()
|
||||
.gap_px()
|
||||
.when(is_collapsible, |this| {
|
||||
this.child(
|
||||
Disclosure::new(("expand", entry_ix), is_open)
|
||||
.opened_icon(IconName::ChevronUp)
|
||||
.closed_icon(IconName::ChevronDown)
|
||||
.visible_on_hover(&card_header_id)
|
||||
.on_click(cx.listener({
|
||||
let id = tool_call.id.clone();
|
||||
move |this: &mut Self, _, _, cx: &mut Context<Self>| {
|
||||
if is_open {
|
||||
this.expanded_tool_calls.remove(&id);
|
||||
} else {
|
||||
this.expanded_tool_calls.insert(id.clone());
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
})),
|
||||
)
|
||||
})
|
||||
.when(failed_or_canceled, |this| {
|
||||
this.child(
|
||||
Icon::new(IconName::Close)
|
||||
.color(Color::Error)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
}),
|
||||
)
|
||||
})
|
||||
.when(failed_or_canceled, |this| {
|
||||
this.child(
|
||||
Icon::new(IconName::Close)
|
||||
.color(Color::Error)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
}),
|
||||
)
|
||||
}),
|
||||
)
|
||||
}),
|
||||
)
|
||||
}
|
||||
})
|
||||
.children(tool_output_display)
|
||||
}
|
||||
|
||||
fn render_tool_call_label(
|
||||
&self,
|
||||
entry_ix: usize,
|
||||
tool_call: &ToolCall,
|
||||
is_edit: bool,
|
||||
use_card_layout: bool,
|
||||
window: &Window,
|
||||
cx: &Context<Self>,
|
||||
) -> Div {
|
||||
let has_location = tool_call.locations.len() == 1;
|
||||
|
||||
let tool_icon = if tool_call.kind == acp::ToolKind::Edit && has_location {
|
||||
FileIcons::get_icon(&tool_call.locations[0].path, cx)
|
||||
.map(Icon::from_path)
|
||||
.unwrap_or(Icon::new(IconName::ToolPencil))
|
||||
} else {
|
||||
Icon::new(match tool_call.kind {
|
||||
acp::ToolKind::Read => IconName::ToolSearch,
|
||||
acp::ToolKind::Edit => IconName::ToolPencil,
|
||||
acp::ToolKind::Delete => IconName::ToolDeleteFile,
|
||||
acp::ToolKind::Move => IconName::ArrowRightLeft,
|
||||
acp::ToolKind::Search => IconName::ToolSearch,
|
||||
acp::ToolKind::Execute => IconName::ToolTerminal,
|
||||
acp::ToolKind::Think => IconName::ToolThink,
|
||||
acp::ToolKind::Fetch => IconName::ToolWeb,
|
||||
acp::ToolKind::SwitchMode => IconName::ArrowRightLeft,
|
||||
acp::ToolKind::Other => IconName::ToolHammer,
|
||||
})
|
||||
}
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted);
|
||||
|
||||
let gradient_overlay = {
|
||||
div()
|
||||
.absolute()
|
||||
.top_0()
|
||||
.right_0()
|
||||
.w_12()
|
||||
.h_full()
|
||||
.map(|this| {
|
||||
if use_card_layout {
|
||||
this.bg(linear_gradient(
|
||||
90.,
|
||||
linear_color_stop(self.tool_card_header_bg(cx), 1.),
|
||||
linear_color_stop(self.tool_card_header_bg(cx).opacity(0.2), 0.),
|
||||
))
|
||||
} else {
|
||||
this.bg(linear_gradient(
|
||||
90.,
|
||||
linear_color_stop(cx.theme().colors().panel_background, 1.),
|
||||
linear_color_stop(
|
||||
cx.theme().colors().panel_background.opacity(0.2),
|
||||
0.,
|
||||
),
|
||||
))
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
h_flex()
|
||||
.relative()
|
||||
.w_full()
|
||||
.h(window.line_height() - px(2.))
|
||||
.text_size(self.tool_name_font_size())
|
||||
.gap_1p5()
|
||||
.when(has_location || use_card_layout, |this| this.px_1())
|
||||
.when(has_location, |this| {
|
||||
this.cursor(CursorStyle::PointingHand)
|
||||
.rounded(rems_from_px(3.)) // Concentric border radius
|
||||
.hover(|s| s.bg(cx.theme().colors().element_hover.opacity(0.5)))
|
||||
})
|
||||
.overflow_hidden()
|
||||
.child(tool_icon)
|
||||
.child(if has_location {
|
||||
h_flex()
|
||||
.id(("open-tool-call-location", entry_ix))
|
||||
.w_full()
|
||||
.map(|this| {
|
||||
if use_card_layout {
|
||||
this.text_color(cx.theme().colors().text)
|
||||
} else {
|
||||
this.text_color(cx.theme().colors().text_muted)
|
||||
}
|
||||
})
|
||||
.child(self.render_markdown(
|
||||
tool_call.label.clone(),
|
||||
MarkdownStyle {
|
||||
prevent_mouse_interaction: true,
|
||||
..default_markdown_style(false, true, window, cx)
|
||||
},
|
||||
))
|
||||
.tooltip(Tooltip::text("Jump to File"))
|
||||
.on_click(cx.listener(move |this, _, window, cx| {
|
||||
this.open_tool_call_location(entry_ix, 0, window, cx);
|
||||
}))
|
||||
.into_any_element()
|
||||
} else {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.child(self.render_markdown(
|
||||
tool_call.label.clone(),
|
||||
default_markdown_style(false, true, window, cx),
|
||||
))
|
||||
.into_any()
|
||||
})
|
||||
.when(!is_edit, |this| this.child(gradient_overlay))
|
||||
}
|
||||
|
||||
fn render_tool_call_content(
|
||||
&self,
|
||||
entry_ix: usize,
|
||||
@@ -3637,29 +3704,32 @@ impl AcpThreadView {
|
||||
|(index, (buffer, _diff))| {
|
||||
let file = buffer.read(cx).file()?;
|
||||
let path = file.path();
|
||||
let path_style = file.path_style(cx);
|
||||
let separator = file.path_style(cx).separator();
|
||||
|
||||
let file_path = path.parent().and_then(|parent| {
|
||||
let parent_str = parent.to_string_lossy();
|
||||
|
||||
if parent_str.is_empty() {
|
||||
if parent.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(
|
||||
Label::new(format!("/{}{}", parent_str, std::path::MAIN_SEPARATOR_STR))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::XSmall)
|
||||
.buffer_font(cx),
|
||||
Label::new(format!(
|
||||
"{separator}{}{separator}",
|
||||
parent.display(path_style)
|
||||
))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::XSmall)
|
||||
.buffer_font(cx),
|
||||
)
|
||||
}
|
||||
});
|
||||
|
||||
let file_name = path.file_name().map(|name| {
|
||||
Label::new(name.to_string_lossy().to_string())
|
||||
Label::new(name.to_string())
|
||||
.size(LabelSize::XSmall)
|
||||
.buffer_font(cx)
|
||||
});
|
||||
|
||||
let file_icon = FileIcons::get_icon(path, cx)
|
||||
let file_icon = FileIcons::get_icon(path.as_std_path(), cx)
|
||||
.map(Icon::from_path)
|
||||
.map(|icon| icon.color(Color::Muted).size(IconSize::Small))
|
||||
.unwrap_or_else(|| {
|
||||
@@ -4502,7 +4572,7 @@ impl AcpThreadView {
|
||||
.read(cx)
|
||||
.visible_worktrees(cx)
|
||||
.next()
|
||||
.map(|worktree| worktree.read(cx).root_name().to_string())
|
||||
.map(|worktree| worktree.read(cx).root_name_str().to_string())
|
||||
});
|
||||
|
||||
if let Some(screen_window) = cx
|
||||
@@ -5669,23 +5739,6 @@ pub(crate) mod tests {
});
}

#[gpui::test]
async fn test_spawn_external_agent_login_handles_spaces(cx: &mut TestAppContext) {
init_test(cx);

// Verify paths with spaces aren't pre-quoted
let path_with_spaces = "/Users/test/Library/Application Support/Zed/cli.js";
let login_task = task::SpawnInTerminal {
command: Some("node".to_string()),
args: vec![path_with_spaces.to_string(), "/login".to_string()],
..Default::default()
};

// Args should be passed as-is, not pre-quoted
assert!(!login_task.args[0].starts_with('"'));
assert!(!login_task.args[0].starts_with('\''));
}

#[gpui::test]
async fn test_notification_for_tool_authorization(cx: &mut TestAppContext) {
init_test(cx);
@@ -543,35 +543,23 @@ impl AgentConfiguration {
window: &mut Window,
cx: &mut Context<Self>,
) -> impl IntoElement {
let mut registry_descriptors = self
let mut context_server_ids = self
.context_server_store
.read(cx)
.all_registry_descriptor_ids(cx);
let server_count = registry_descriptors.len();
.server_ids(cx)
.into_iter()
.collect::<Vec<_>>();

// Sort context servers: non-mcp-server ones first, then mcp-server ones
registry_descriptors.sort_by(|a, b| {
let has_mcp_prefix_a = a.0.starts_with("mcp-server-");
let has_mcp_prefix_b = b.0.starts_with("mcp-server-");

match (has_mcp_prefix_a, has_mcp_prefix_b) {
// Sort context servers: ones without mcp-server- prefix first, then prefixed ones
context_server_ids.sort_by(|a, b| {
const MCP_PREFIX: &str = "mcp-server-";
match (a.0.strip_prefix(MCP_PREFIX), b.0.strip_prefix(MCP_PREFIX)) {
// If one has mcp-server- prefix and other doesn't, non-mcp comes first
(true, false) => std::cmp::Ordering::Greater,
(false, true) => std::cmp::Ordering::Less,
(Some(_), None) => std::cmp::Ordering::Greater,
(None, Some(_)) => std::cmp::Ordering::Less,
// If both have same prefix status, sort by appropriate key
_ => {
let get_sort_key = |server_id: &str| -> String {
if let Some(suffix) = server_id.strip_prefix("mcp-server-") {
suffix.to_string()
} else {
server_id.to_string()
}
};

let key_a = get_sort_key(&a.0);
let key_b = get_sort_key(&b.0);
key_a.cmp(&key_b)
}
(Some(a), Some(b)) => a.cmp(b),
(None, None) => a.0.cmp(&b.0),
}
});
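The replacement comparator above orders context server IDs so that entries without the `mcp-server-` prefix come first, and each group is then sorted by its unprefixed name. A self-contained sketch of the same ordering over plain strings; only the `mcp-server-` prefix is carried over from the hunk, and `compare_server_ids` is a hypothetical helper:

```rust
use std::cmp::Ordering;

const MCP_PREFIX: &str = "mcp-server-";

/// Unprefixed IDs sort before prefixed ones; within a group, compare
/// the name with the prefix stripped.
fn compare_server_ids(a: &str, b: &str) -> Ordering {
    match (a.strip_prefix(MCP_PREFIX), b.strip_prefix(MCP_PREFIX)) {
        (Some(_), None) => Ordering::Greater,
        (None, Some(_)) => Ordering::Less,
        (Some(a), Some(b)) => a.cmp(b),
        (None, None) => a.cmp(b),
    }
}

fn main() {
    let mut ids = vec!["mcp-server-github", "postgres", "mcp-server-context7", "docs"];
    ids.sort_by(|a, b| compare_server_ids(a, b));
    assert_eq!(ids, vec!["docs", "postgres", "mcp-server-context7", "mcp-server-github"]);
}
```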
@@ -636,8 +624,8 @@ impl AgentConfiguration {
|
||||
)
|
||||
.child(add_server_popover),
|
||||
)
|
||||
.child(v_flex().w_full().gap_1().map(|parent| {
|
||||
if registry_descriptors.is_empty() {
|
||||
.child(v_flex().w_full().gap_1().map(|mut parent| {
|
||||
if context_server_ids.is_empty() {
|
||||
parent.child(
|
||||
h_flex()
|
||||
.p_4()
|
||||
@@ -653,26 +641,18 @@ impl AgentConfiguration {
|
||||
),
|
||||
)
|
||||
} else {
|
||||
{
|
||||
parent.children(registry_descriptors.into_iter().enumerate().flat_map(
|
||||
|(index, context_server_id)| {
|
||||
let mut elements: Vec<AnyElement> = vec![
|
||||
self.render_context_server(context_server_id, window, cx)
|
||||
.into_any_element(),
|
||||
];
|
||||
|
||||
if index < server_count - 1 {
|
||||
elements.push(
|
||||
Divider::horizontal()
|
||||
.color(DividerColor::BorderFaded)
|
||||
.into_any_element(),
|
||||
);
|
||||
}
|
||||
|
||||
elements
|
||||
},
|
||||
))
|
||||
for (index, context_server_id) in context_server_ids.into_iter().enumerate() {
|
||||
if index > 0 {
|
||||
parent = parent.child(
|
||||
Divider::horizontal()
|
||||
.color(DividerColor::BorderFaded)
|
||||
.into_any_element(),
|
||||
);
|
||||
}
|
||||
parent =
|
||||
parent.child(self.render_context_server(context_server_id, window, cx));
|
||||
}
|
||||
parent
|
||||
}
|
||||
}))
|
||||
}
|
||||
@@ -1106,7 +1086,13 @@ impl AgentConfiguration {
|
||||
IconName::AiClaude,
|
||||
"Claude Code",
|
||||
))
|
||||
.children(user_defined_agents),
|
||||
.map(|mut parent| {
|
||||
for agent in user_defined_agents {
|
||||
parent = parent.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(agent);
|
||||
}
|
||||
parent
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use std::ops::{Not, Range};
|
||||
use std::ops::Range;
|
||||
use std::path::Path;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
@@ -408,6 +408,7 @@ impl ActiveView {
|
||||
|
||||
pub struct AgentPanel {
|
||||
workspace: WeakEntity<Workspace>,
|
||||
loading: bool,
|
||||
user_store: Entity<UserStore>,
|
||||
project: Entity<Project>,
|
||||
fs: Arc<dyn Fs>,
|
||||
@@ -513,6 +514,7 @@ impl AgentPanel {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
panel.as_mut(cx).loading = true;
|
||||
if let Some(serialized_panel) = serialized_panel {
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.width = serialized_panel.width.map(|w| w.round());
|
||||
@@ -527,6 +529,7 @@ impl AgentPanel {
|
||||
panel.new_agent_thread(AgentType::NativeAgent, window, cx);
|
||||
});
|
||||
}
|
||||
panel.as_mut(cx).loading = false;
|
||||
panel
|
||||
})?;
|
||||
|
||||
@@ -662,6 +665,43 @@ impl AgentPanel {
|
||||
)
|
||||
});

let mut old_disable_ai = false;
cx.observe_global_in::<SettingsStore>(window, move |panel, window, cx| {
let disable_ai = DisableAiSettings::get_global(cx).disable_ai;
if old_disable_ai != disable_ai {
let agent_panel_id = cx.entity_id();
let agent_panel_visible = panel
.workspace
.update(cx, |workspace, cx| {
let agent_dock_position = panel.position(window, cx);
let agent_dock = workspace.dock_at_position(agent_dock_position);
let agent_panel_focused = agent_dock
.read(cx)
.active_panel()
.is_some_and(|panel| panel.panel_id() == agent_panel_id);

let active_panel_visible = agent_dock
.read(cx)
.visible_panel()
.is_some_and(|panel| panel.panel_id() == agent_panel_id);

if agent_panel_focused {
cx.dispatch_action(&ToggleFocus);
}

active_panel_visible
})
.unwrap_or_default();

if agent_panel_visible {
cx.emit(PanelEvent::Close);
}

old_disable_ai = disable_ai;
}
})
.detach();
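The observer above remembers the previous `disable_ai` value and only unfocuses or closes the panel when the setting actually flips, rather than on every settings write. A std-only sketch of that edge-trigger pattern, assuming nothing from GPUI; `on_change` is a hypothetical helper standing in for the settings observer:

```rust
/// Wrap a callback so it only fires when the observed boolean changes.
fn on_change(mut old: bool, mut callback: impl FnMut(bool)) -> impl FnMut(bool) {
    move |new| {
        if old != new {
            callback(new);
            old = new;
        }
    }
}

fn main() {
    let mut fired = Vec::new();
    {
        let mut observer = on_change(false, |disable_ai| fired.push(disable_ai));
        observer(false); // no change, callback not invoked
        observer(true);  // flipped on
        observer(true);  // still on, ignored
        observer(false); // flipped off
    }
    assert_eq!(fired, vec![true, false]);
}
```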
Self {
|
||||
active_view,
|
||||
workspace,
|
||||
@@ -674,11 +714,9 @@ impl AgentPanel {
|
||||
prompt_store,
|
||||
configuration: None,
|
||||
configuration_subscription: None,
|
||||
|
||||
inline_assist_context_store,
|
||||
previous_view: None,
|
||||
history_store: history_store.clone(),
|
||||
|
||||
new_thread_menu_handle: PopoverMenuHandle::default(),
|
||||
agent_panel_menu_handle: PopoverMenuHandle::default(),
|
||||
assistant_navigation_menu_handle: PopoverMenuHandle::default(),
|
||||
@@ -691,6 +729,7 @@ impl AgentPanel {
|
||||
acp_history,
|
||||
acp_history_store,
|
||||
selected_agent: AgentType::default(),
|
||||
loading: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -703,7 +742,6 @@ impl AgentPanel {
|
||||
if workspace
|
||||
.panel::<Self>(cx)
|
||||
.is_some_and(|panel| panel.read(cx).enabled(cx))
|
||||
&& !DisableAiSettings::get_global(cx).disable_ai
|
||||
{
|
||||
workspace.toggle_panel_focus::<Self>(window, cx);
|
||||
}
|
||||
@@ -823,6 +861,7 @@ impl AgentPanel {
|
||||
agent: crate::ExternalAgent,
|
||||
}
|
||||
|
||||
let loading = self.loading;
|
||||
let history = self.acp_history_store.clone();
|
||||
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
@@ -864,7 +903,9 @@ impl AgentPanel {
|
||||
}
|
||||
};
|
||||
|
||||
telemetry::event!("Agent Thread Started", agent = ext_agent.name());
|
||||
if !loading {
|
||||
telemetry::event!("Agent Thread Started", agent = ext_agent.name());
|
||||
}
|
||||
|
||||
let server = ext_agent.server(fs, history);
|
||||
|
||||
@@ -1067,7 +1108,7 @@ impl AgentPanel {
|
||||
let _ = settings
|
||||
.theme
|
||||
.agent_font_size
|
||||
.insert(Some(theme::clamp_font_size(agent_font_size).into()));
|
||||
.insert(theme::clamp_font_size(agent_font_size).into());
|
||||
});
|
||||
} else {
|
||||
theme::adjust_agent_font_size(cx, |size| size + delta);
|
||||
@@ -1499,7 +1540,7 @@ impl Panel for AgentPanel {
|
||||
}
|
||||
|
||||
fn enabled(&self, cx: &App) -> bool {
|
||||
DisableAiSettings::get_global(cx).disable_ai.not() && AgentSettings::get_global(cx).enabled
|
||||
AgentSettings::get_global(cx).enabled(cx)
|
||||
}
|
||||
|
||||
fn is_zoomed(&self, _window: &Window, _cx: &App) -> bool {
|
||||
|
||||
@@ -264,7 +264,7 @@ pub fn init(
|
||||
init_language_model_settings(cx);
|
||||
}
|
||||
assistant_slash_command::init(cx);
|
||||
agent::init(cx);
|
||||
agent::init(fs.clone(), cx);
|
||||
agent_panel::init(cx);
|
||||
context_server_configuration::init(language_registry.clone(), fs.clone(), cx);
|
||||
TextThreadEditor::init(cx);
|
||||
|
||||
@@ -33,6 +33,8 @@ use thread_context_picker::{
|
||||
use ui::{
|
||||
ButtonLike, ContextMenu, ContextMenuEntry, ContextMenuItem, Disclosure, TintColor, prelude::*,
|
||||
};
|
||||
use util::paths::PathStyle;
|
||||
use util::rel_path::RelPath;
|
||||
use workspace::{Workspace, notifications::NotifyResultExt};
|
||||
|
||||
use agent::{
|
||||
@@ -228,12 +230,19 @@ impl ContextPicker {
|
||||
let context_picker = cx.entity();
|
||||
|
||||
let menu = ContextMenu::build(window, cx, move |menu, _window, cx| {
|
||||
let Some(workspace) = self.workspace.upgrade() else {
|
||||
return menu;
|
||||
};
|
||||
let path_style = workspace.read(cx).path_style(cx);
|
||||
let recent = self.recent_entries(cx);
|
||||
let has_recent = !recent.is_empty();
|
||||
let recent_entries = recent
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(ix, entry)| self.recent_menu_item(context_picker.clone(), ix, entry));
|
||||
.map(|(ix, entry)| {
|
||||
self.recent_menu_item(context_picker.clone(), ix, entry, path_style)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let entries = self
|
||||
.workspace
|
||||
@@ -395,6 +404,7 @@ impl ContextPicker {
|
||||
context_picker: Entity<ContextPicker>,
|
||||
ix: usize,
|
||||
entry: RecentEntry,
|
||||
path_style: PathStyle,
|
||||
) -> ContextMenuItem {
|
||||
match entry {
|
||||
RecentEntry::File {
|
||||
@@ -413,6 +423,7 @@ impl ContextPicker {
|
||||
&path,
|
||||
&path_prefix,
|
||||
false,
|
||||
path_style,
|
||||
context_store.clone(),
|
||||
cx,
|
||||
)
|
||||
@@ -586,7 +597,7 @@ impl Render for ContextPicker {
|
||||
pub(crate) enum RecentEntry {
|
||||
File {
|
||||
project_path: ProjectPath,
|
||||
path_prefix: Arc<str>,
|
||||
path_prefix: Arc<RelPath>,
|
||||
},
|
||||
Thread(ThreadContextEntry),
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ use http_client::HttpClientWithUrl;
|
||||
use itertools::Itertools;
|
||||
use language::{Buffer, CodeLabel, HighlightId};
|
||||
use lsp::CompletionContext;
|
||||
use project::lsp_store::SymbolLocation;
|
||||
use project::{
|
||||
Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, ProjectPath,
|
||||
Symbol, WorktreeId,
|
||||
@@ -22,6 +23,8 @@ use rope::Point;
|
||||
use text::{Anchor, OffsetRangeExt, ToPoint};
|
||||
use ui::prelude::*;
|
||||
use util::ResultExt as _;
|
||||
use util::paths::PathStyle;
|
||||
use util::rel_path::RelPath;
|
||||
use workspace::Workspace;
|
||||
|
||||
use agent::{
|
||||
@@ -574,11 +577,12 @@ impl ContextPickerCompletionProvider {
|
||||
|
||||
fn completion_for_path(
|
||||
project_path: ProjectPath,
|
||||
path_prefix: &str,
|
||||
path_prefix: &RelPath,
|
||||
is_recent: bool,
|
||||
is_directory: bool,
|
||||
excerpt_id: ExcerptId,
|
||||
source_range: Range<Anchor>,
|
||||
path_style: PathStyle,
|
||||
editor: Entity<Editor>,
|
||||
context_store: Entity<ContextStore>,
|
||||
cx: &App,
|
||||
@@ -586,6 +590,7 @@ impl ContextPickerCompletionProvider {
|
||||
let (file_name, directory) = super::file_context_picker::extract_file_name_and_directory(
|
||||
&project_path.path,
|
||||
path_prefix,
|
||||
path_style,
|
||||
);
|
||||
|
||||
let label =
|
||||
@@ -657,17 +662,22 @@ impl ContextPickerCompletionProvider {
|
||||
workspace: Entity<Workspace>,
|
||||
cx: &mut App,
|
||||
) -> Option<Completion> {
|
||||
let path_style = workspace.read(cx).path_style(cx);
|
||||
let SymbolLocation::InProject(symbol_path) = &symbol.path else {
|
||||
return None;
|
||||
};
|
||||
let path_prefix = workspace
|
||||
.read(cx)
|
||||
.project()
|
||||
.read(cx)
|
||||
.worktree_for_id(symbol.path.worktree_id, cx)?
|
||||
.worktree_for_id(symbol_path.worktree_id, cx)?
|
||||
.read(cx)
|
||||
.root_name();
|
||||
|
||||
let (file_name, directory) = super::file_context_picker::extract_file_name_and_directory(
|
||||
&symbol.path.path,
|
||||
&symbol_path.path,
|
||||
path_prefix,
|
||||
path_style,
|
||||
);
|
||||
let full_path = if let Some(directory) = directory {
|
||||
format!("{}{}", directory, file_name)
|
||||
@@ -768,6 +778,7 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
let text_thread_store = self.text_thread_store.clone();
|
||||
let editor = self.editor.clone();
|
||||
let http_client = workspace.read(cx).client().http_client();
|
||||
let path_style = workspace.read(cx).path_style(cx);
|
||||
|
||||
let MentionCompletion { mode, argument, .. } = state;
|
||||
let query = argument.unwrap_or_else(|| "".to_string());
|
||||
@@ -834,6 +845,7 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
mat.is_dir,
|
||||
excerpt_id,
|
||||
source_range.clone(),
|
||||
path_style,
|
||||
editor.clone(),
|
||||
context_store.clone(),
|
||||
cx,
|
||||
@@ -1064,7 +1076,7 @@ mod tests {
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::{ops::Deref, rc::Rc};
|
||||
use util::path;
|
||||
use util::{path, rel_path::rel_path};
|
||||
use workspace::{AppState, Item};
|
||||
|
||||
#[test]
|
||||
@@ -1215,16 +1227,18 @@ mod tests {
|
||||
let mut cx = VisualTestContext::from_window(*window.deref(), cx);
|
||||
|
||||
let paths = vec![
|
||||
path!("a/one.txt"),
|
||||
path!("a/two.txt"),
|
||||
path!("a/three.txt"),
|
||||
path!("a/four.txt"),
|
||||
path!("b/five.txt"),
|
||||
path!("b/six.txt"),
|
||||
path!("b/seven.txt"),
|
||||
path!("b/eight.txt"),
|
||||
rel_path("a/one.txt"),
|
||||
rel_path("a/two.txt"),
|
||||
rel_path("a/three.txt"),
|
||||
rel_path("a/four.txt"),
|
||||
rel_path("b/five.txt"),
|
||||
rel_path("b/six.txt"),
|
||||
rel_path("b/seven.txt"),
|
||||
rel_path("b/eight.txt"),
|
||||
];
|
||||
|
||||
let slash = PathStyle::local().separator();
|
||||
|
||||
let mut opened_editors = Vec::new();
|
||||
for path in paths {
|
||||
let buffer = workspace
|
||||
@@ -1232,7 +1246,7 @@ mod tests {
|
||||
workspace.open_path(
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: Path::new(path).into(),
|
||||
path: path.into(),
|
||||
},
|
||||
None,
|
||||
false,
|
||||
@@ -1308,13 +1322,13 @@ mod tests {
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
&[
|
||||
"seven.txt dir/b/",
|
||||
"six.txt dir/b/",
|
||||
"five.txt dir/b/",
|
||||
"four.txt dir/a/",
|
||||
"Files & Directories",
|
||||
"Symbols",
|
||||
"Fetch"
|
||||
format!("seven.txt dir{slash}b{slash}"),
|
||||
format!("six.txt dir{slash}b{slash}"),
|
||||
format!("five.txt dir{slash}b{slash}"),
|
||||
format!("four.txt dir{slash}a{slash}"),
|
||||
"Files & Directories".into(),
|
||||
"Symbols".into(),
|
||||
"Fetch".into()
|
||||
]
|
||||
);
|
||||
});
|
||||
@@ -1341,7 +1355,10 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(editor.text(cx), "Lorem @file one");
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(current_completion_labels(editor), vec!["one.txt dir/a/"]);
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
vec![format!("one.txt dir{slash}a{slash}")]
|
||||
);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
@@ -1350,7 +1367,10 @@ mod tests {
|
||||
});
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(editor.text(cx), "Lorem [@one.txt](@file:dir/a/one.txt) ");
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) ")
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
fold_ranges(editor, cx),
|
||||
@@ -1361,7 +1381,10 @@ mod tests {
|
||||
cx.simulate_input(" ");
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(editor.text(cx), "Lorem [@one.txt](@file:dir/a/one.txt) ");
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) ")
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
fold_ranges(editor, cx),
|
||||
@@ -1374,7 +1397,7 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
"Lorem [@one.txt](@file:dir/a/one.txt) Ipsum ",
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum "),
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
@@ -1388,7 +1411,7 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
"Lorem [@one.txt](@file:dir/a/one.txt) Ipsum @file ",
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum @file "),
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
@@ -1406,7 +1429,7 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
"Lorem [@one.txt](@file:dir/a/one.txt) Ipsum [@seven.txt](@file:dir/b/seven.txt) "
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum [@seven.txt](@file:dir{slash}b{slash}seven.txt) ")
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
@@ -1423,7 +1446,7 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
"Lorem [@one.txt](@file:dir/a/one.txt) Ipsum [@seven.txt](@file:dir/b/seven.txt) \n@"
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum [@seven.txt](@file:dir{slash}b{slash}seven.txt) \n@")
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
@@ -1444,7 +1467,7 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
"Lorem [@one.txt](@file:dir/a/one.txt) Ipsum [@seven.txt](@file:dir/b/seven.txt) \n[@six.txt](@file:dir/b/six.txt) "
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum [@seven.txt](@file:dir{slash}b{slash}seven.txt) \n[@six.txt](@file:dir{slash}b{slash}six.txt) ")
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
@@ -10,7 +9,7 @@ use gpui::{
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use project::{PathMatchCandidateSet, ProjectPath, WorktreeId};
|
||||
use ui::{ListItem, Tooltip, prelude::*};
|
||||
use util::ResultExt as _;
|
||||
use util::{ResultExt as _, paths::PathStyle, rel_path::RelPath};
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::context_picker::ContextPicker;
|
||||
@@ -161,6 +160,8 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let FileMatch { mat, .. } = &self.matches.get(ix)?;
|
||||
let workspace = self.workspace.upgrade()?;
|
||||
let path_style = workspace.read(cx).path_style(cx);
|
||||
|
||||
Some(
|
||||
ListItem::new(ix)
|
||||
@@ -172,6 +173,7 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
&mat.path,
|
||||
&mat.path_prefix,
|
||||
mat.is_dir,
|
||||
path_style,
|
||||
self.context_store.clone(),
|
||||
cx,
|
||||
)),
|
||||
@@ -214,14 +216,13 @@ pub(crate) fn search_files(
|
||||
|
||||
let file_matches = project.worktrees(cx).flat_map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
let path_prefix: Arc<str> = worktree.root_name().into();
|
||||
worktree.entries(false, 0).map(move |entry| FileMatch {
|
||||
mat: PathMatch {
|
||||
score: 0.,
|
||||
positions: Vec::new(),
|
||||
worktree_id: worktree.id().to_usize(),
|
||||
path: entry.path.clone(),
|
||||
path_prefix: path_prefix.clone(),
|
||||
path_prefix: worktree.root_name().into(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir: entry.is_dir(),
|
||||
},
|
||||
@@ -251,7 +252,7 @@ pub(crate) fn search_files(
|
||||
fuzzy::match_path_sets(
|
||||
candidate_sets.as_slice(),
|
||||
query.as_str(),
|
||||
None,
|
||||
&None,
|
||||
false,
|
||||
100,
|
||||
&cancellation_flag,
|
||||
@@ -269,51 +270,31 @@ pub(crate) fn search_files(
}

pub fn extract_file_name_and_directory(
path: &Path,
path_prefix: &str,
path: &RelPath,
path_prefix: &RelPath,
path_style: PathStyle,
) -> (SharedString, Option<SharedString>) {
if path == Path::new("") {
(
SharedString::from(
path_prefix
.trim_end_matches(std::path::MAIN_SEPARATOR)
.to_string(),
),
None,
)
} else {
let file_name = path
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string()
.into();

let mut directory = path_prefix
.trim_end_matches(std::path::MAIN_SEPARATOR)
.to_string();
if !directory.ends_with('/') {
directory.push('/');
}
if let Some(parent) = path.parent().filter(|parent| parent != &Path::new("")) {
directory.push_str(&parent.to_string_lossy());
directory.push('/');
}

(file_name, Some(directory.into()))
}
let full_path = path_prefix.join(path);
let file_name = full_path.file_name().unwrap_or_default();
let display_path = full_path.display(path_style);
let (directory, file_name) = display_path.split_at(display_path.len() - file_name.len());
(
file_name.to_string().into(),
Some(SharedString::new(directory)).filter(|dir| !dir.is_empty()),
)
}
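The new implementation above joins the worktree prefix onto the relative path, renders it with the worktree's path style, and then splits the display string into a directory prefix and a file name by length. A small sketch of that split over a plain `/`-separated string, standing in for the `RelPath`/`PathStyle` types; `split_display_path` is a hypothetical name:

```rust
/// Split a display path into (file_name, directory_prefix), where the prefix
/// keeps its trailing separator, e.g. "dir/a/one.txt" -> ("one.txt", Some("dir/a/")).
fn split_display_path(display_path: &str) -> (String, Option<String>) {
    let file_name = display_path.rsplit('/').next().unwrap_or(display_path);
    let (directory, file_name) = display_path.split_at(display_path.len() - file_name.len());
    (
        file_name.to_string(),
        Some(directory.to_string()).filter(|dir| !dir.is_empty()),
    )
}

fn main() {
    assert_eq!(
        split_display_path("dir/a/one.txt"),
        ("one.txt".to_string(), Some("dir/a/".to_string()))
    );
    assert_eq!(split_display_path("one.txt"), ("one.txt".to_string(), None));
}
```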
pub fn render_file_context_entry(
|
||||
id: ElementId,
|
||||
worktree_id: WorktreeId,
|
||||
path: &Arc<Path>,
|
||||
path_prefix: &Arc<str>,
|
||||
path: &Arc<RelPath>,
|
||||
path_prefix: &Arc<RelPath>,
|
||||
is_directory: bool,
|
||||
path_style: PathStyle,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
cx: &App,
|
||||
) -> Stateful<Div> {
|
||||
let (file_name, directory) = extract_file_name_and_directory(path, path_prefix);
|
||||
let (file_name, directory) = extract_file_name_and_directory(path, path_prefix, path_style);
|
||||
|
||||
let added = context_store.upgrade().and_then(|context_store| {
|
||||
let project_path = ProjectPath {
|
||||
@@ -330,9 +311,9 @@ pub fn render_file_context_entry(
|
||||
});
|
||||
|
||||
let file_icon = if is_directory {
|
||||
FileIcons::get_folder_icon(false, path, cx)
|
||||
FileIcons::get_folder_icon(false, path.as_std_path(), cx)
|
||||
} else {
|
||||
FileIcons::get_icon(path, cx)
|
||||
FileIcons::get_icon(path.as_std_path(), cx)
|
||||
}
|
||||
.map(Icon::from_path)
|
||||
.unwrap_or_else(|| Icon::new(IconName::File));
|
||||
|
||||
@@ -2,13 +2,14 @@ use std::cmp::Reverse;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
use anyhow::Result;
|
||||
use anyhow::{Result, anyhow};
|
||||
use fuzzy::{StringMatch, StringMatchCandidate};
|
||||
use gpui::{
|
||||
App, AppContext, DismissEvent, Entity, FocusHandle, Focusable, Stateful, Task, WeakEntity,
|
||||
};
|
||||
use ordered_float::OrderedFloat;
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use project::lsp_store::SymbolLocation;
|
||||
use project::{DocumentSymbol, Symbol};
|
||||
use ui::{ListItem, prelude::*};
|
||||
use util::ResultExt as _;
|
||||
@@ -191,7 +192,10 @@ pub(crate) fn add_symbol(
|
||||
) -> Task<Result<(Option<AgentContextHandle>, bool)>> {
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let open_buffer_task = project.update(cx, |project, cx| {
|
||||
project.open_buffer(symbol.path.clone(), cx)
|
||||
let SymbolLocation::InProject(symbol_path) = &symbol.path else {
|
||||
return Task::ready(Err(anyhow!("can't add symbol from outside of project")));
|
||||
};
|
||||
project.open_buffer(symbol_path.clone(), cx)
|
||||
});
|
||||
cx.spawn(async move |cx| {
|
||||
let buffer = open_buffer_task.await?;
|
||||
@@ -291,10 +295,11 @@ pub(crate) fn search_symbols(
.map(|(id, symbol)| {
StringMatchCandidate::new(id, symbol.label.filter_text())
})
.partition(|candidate| {
project
.entry_for_path(&symbols[candidate.id].path, cx)
.is_some_and(|e| !e.is_ignored)
.partition(|candidate| match &symbols[candidate.id].path {
SymbolLocation::InProject(project_path) => project
.entry_for_path(project_path, cx)
.is_some_and(|e| !e.is_ignored),
SymbolLocation::OutsideProject { .. } => false,
})
})
.log_err()
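The updated closure above partitions match candidates into those whose symbol lives in a non-ignored, in-project file and everything else. A stripped-down sketch of the same partition with a local `SymbolLocation` enum; the project `entry_for_path` lookup is replaced by a plain `is_ignored` flag since the project APIs aren't available here:

```rust
enum SymbolLocation {
    InProject { is_ignored: bool },
    OutsideProject,
}

struct Candidate {
    name: &'static str,
    location: SymbolLocation,
}

fn main() {
    let candidates = vec![
        Candidate { name: "visible", location: SymbolLocation::InProject { is_ignored: false } },
        Candidate { name: "ignored", location: SymbolLocation::InProject { is_ignored: true } },
        Candidate { name: "external", location: SymbolLocation::OutsideProject },
    ];

    // Candidates from non-ignored, in-project files land in the first bucket;
    // everything else (ignored or outside the project) lands in the second.
    let (visible, hidden): (Vec<_>, Vec<_>) =
        candidates.into_iter().partition(|candidate| match candidate.location {
            SymbolLocation::InProject { is_ignored } => !is_ignored,
            SymbolLocation::OutsideProject => false,
        });

    assert_eq!(visible.len(), 1);
    assert_eq!(visible[0].name, "visible");
    assert_eq!(hidden.len(), 2);
}
```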
@@ -360,13 +365,18 @@ fn compute_symbol_entries(
|
||||
}
|
||||
|
||||
pub fn render_symbol_context_entry(id: ElementId, entry: &SymbolEntry) -> Stateful<Div> {
|
||||
let path = entry
|
||||
.symbol
|
||||
.path
|
||||
.path
|
||||
.file_name()
|
||||
.map(|s| s.to_string_lossy())
|
||||
.unwrap_or_default();
|
||||
let path = match &entry.symbol.path {
|
||||
SymbolLocation::InProject(project_path) => {
|
||||
project_path.path.file_name().unwrap_or_default().into()
|
||||
}
|
||||
SymbolLocation::OutsideProject {
|
||||
abs_path,
|
||||
signature: _,
|
||||
} => abs_path
|
||||
.file_name()
|
||||
.map(|f| f.to_string_lossy())
|
||||
.unwrap_or_default(),
|
||||
};
|
||||
let symbol_location = format!("{} L{}", path, entry.symbol.range.start.0.row + 1);
|
||||
|
||||
h_flex()
|
||||
|
||||
@@ -144,8 +144,7 @@ impl InlineAssistant {
|
||||
let Some(terminal_panel) = workspace.read(cx).panel::<TerminalPanel>(cx) else {
|
||||
return;
|
||||
};
|
||||
let enabled = !DisableAiSettings::get_global(cx).disable_ai
|
||||
&& AgentSettings::get_global(cx).enabled;
|
||||
let enabled = AgentSettings::get_global(cx).enabled(cx);
|
||||
terminal_panel.update(cx, |terminal_panel, cx| {
|
||||
terminal_panel.set_assistant_enabled(enabled, cx)
|
||||
});
|
||||
@@ -257,8 +256,7 @@ impl InlineAssistant {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Workspace>,
|
||||
) {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
if !settings.enabled || DisableAiSettings::get_global(cx).disable_ai {
|
||||
if !AgentSettings::get_global(cx).enabled(cx) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -1788,7 +1786,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
|
||||
_: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Vec<CodeAction>>> {
|
||||
if !AgentSettings::get_global(cx).enabled {
|
||||
if !AgentSettings::get_global(cx).enabled(cx) {
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
}
|
||||
|
||||
|
||||
@@ -238,7 +238,7 @@ impl TerminalInlineAssistant {
|
||||
let latest_output = terminal.last_n_non_empty_lines(DEFAULT_CONTEXT_LINES);
|
||||
let working_directory = terminal
|
||||
.working_directory()
|
||||
.map(|path| path.to_string_lossy().to_string());
|
||||
.map(|path| path.to_string_lossy().into_owned());
|
||||
(latest_output, working_directory)
|
||||
})
|
||||
.ok()
|
||||
|
||||
@@ -1431,10 +1431,14 @@ impl TextThreadEditor {
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
let worktree_root_name = worktree.read(cx).root_name().to_string();
|
||||
let mut full_path = PathBuf::from(worktree_root_name.clone());
|
||||
full_path.push(&project_path.path);
|
||||
file_slash_command_args.push(full_path.to_string_lossy().to_string());
|
||||
let path_style = worktree.read(cx).path_style();
|
||||
let full_path = worktree
|
||||
.read(cx)
|
||||
.root_name()
|
||||
.join(&project_path.path)
|
||||
.display(path_style)
|
||||
.into_owned();
|
||||
file_slash_command_args.push(full_path);
|
||||
}
|
||||
|
||||
let cmd_name = FileSlashCommand.name();
|
||||
|
||||
@@ -17,6 +17,7 @@ use agent::context::{
|
||||
FileContextHandle, ImageContext, ImageStatus, RulesContextHandle, SelectionContextHandle,
|
||||
SymbolContextHandle, TextThreadContextHandle, ThreadContextHandle,
|
||||
};
|
||||
use util::paths::PathStyle;
|
||||
|
||||
#[derive(IntoElement)]
|
||||
pub enum ContextPill {
|
||||
@@ -303,33 +304,54 @@ impl AddedContext {
|
||||
cx: &App,
|
||||
) -> Option<AddedContext> {
|
||||
match handle {
|
||||
AgentContextHandle::File(handle) => Self::pending_file(handle, cx),
|
||||
AgentContextHandle::File(handle) => {
|
||||
Self::pending_file(handle, project.path_style(cx), cx)
|
||||
}
|
||||
AgentContextHandle::Directory(handle) => Self::pending_directory(handle, project, cx),
|
||||
AgentContextHandle::Symbol(handle) => Self::pending_symbol(handle, cx),
|
||||
AgentContextHandle::Selection(handle) => Self::pending_selection(handle, cx),
|
||||
AgentContextHandle::Symbol(handle) => {
|
||||
Self::pending_symbol(handle, project.path_style(cx), cx)
|
||||
}
|
||||
AgentContextHandle::Selection(handle) => {
|
||||
Self::pending_selection(handle, project.path_style(cx), cx)
|
||||
}
|
||||
AgentContextHandle::FetchedUrl(handle) => Some(Self::fetched_url(handle)),
|
||||
AgentContextHandle::Thread(handle) => Some(Self::pending_thread(handle, cx)),
|
||||
AgentContextHandle::TextThread(handle) => Some(Self::pending_text_thread(handle, cx)),
|
||||
AgentContextHandle::Rules(handle) => Self::pending_rules(handle, prompt_store, cx),
|
||||
AgentContextHandle::Image(handle) => Some(Self::image(handle, model, cx)),
|
||||
AgentContextHandle::Image(handle) => {
|
||||
Some(Self::image(handle, model, project.path_style(cx), cx))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn pending_file(handle: FileContextHandle, cx: &App) -> Option<AddedContext> {
|
||||
let full_path = handle.buffer.read(cx).file()?.full_path(cx);
|
||||
Some(Self::file(handle, &full_path, cx))
|
||||
fn pending_file(
|
||||
handle: FileContextHandle,
|
||||
path_style: PathStyle,
|
||||
cx: &App,
|
||||
) -> Option<AddedContext> {
|
||||
let full_path = handle
|
||||
.buffer
|
||||
.read(cx)
|
||||
.file()?
|
||||
.full_path(cx)
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
Some(Self::file(handle, &full_path, path_style, cx))
|
||||
}
|
||||
|
||||
fn file(handle: FileContextHandle, full_path: &Path, cx: &App) -> AddedContext {
|
||||
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
|
||||
let (name, parent) =
|
||||
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
|
||||
fn file(
|
||||
handle: FileContextHandle,
|
||||
full_path: &str,
|
||||
path_style: PathStyle,
|
||||
cx: &App,
|
||||
) -> AddedContext {
|
||||
let (name, parent) = extract_file_name_and_directory_from_full_path(full_path, path_style);
|
||||
AddedContext {
|
||||
kind: ContextKind::File,
|
||||
name,
|
||||
parent,
|
||||
tooltip: Some(full_path_string),
|
||||
icon_path: FileIcons::get_icon(full_path, cx),
|
||||
tooltip: Some(SharedString::new(full_path)),
|
||||
icon_path: FileIcons::get_icon(Path::new(full_path), cx),
|
||||
status: ContextStatus::Ready,
|
||||
render_hover: None,
|
||||
handle: AgentContextHandle::File(handle),
|
||||
@@ -343,19 +365,24 @@ impl AddedContext {
|
||||
) -> Option<AddedContext> {
|
||||
let worktree = project.worktree_for_entry(handle.entry_id, cx)?.read(cx);
|
||||
let entry = worktree.entry_for_id(handle.entry_id)?;
|
||||
let full_path = worktree.full_path(&entry.path);
|
||||
Some(Self::directory(handle, &full_path))
|
||||
let full_path = worktree
|
||||
.full_path(&entry.path)
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
Some(Self::directory(handle, &full_path, project.path_style(cx)))
|
||||
}
|
||||
|
||||
fn directory(handle: DirectoryContextHandle, full_path: &Path) -> AddedContext {
|
||||
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
|
||||
let (name, parent) =
|
||||
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
|
||||
fn directory(
|
||||
handle: DirectoryContextHandle,
|
||||
full_path: &str,
|
||||
path_style: PathStyle,
|
||||
) -> AddedContext {
|
||||
let (name, parent) = extract_file_name_and_directory_from_full_path(full_path, path_style);
|
||||
AddedContext {
|
||||
kind: ContextKind::Directory,
|
||||
name,
|
||||
parent,
|
||||
tooltip: Some(full_path_string),
|
||||
tooltip: Some(SharedString::new(full_path)),
|
||||
icon_path: None,
|
||||
status: ContextStatus::Ready,
|
||||
render_hover: None,
|
||||
@@ -363,9 +390,17 @@ impl AddedContext {
|
||||
}
|
||||
}
|
||||
|
||||
fn pending_symbol(handle: SymbolContextHandle, cx: &App) -> Option<AddedContext> {
|
||||
let excerpt =
|
||||
ContextFileExcerpt::new(&handle.full_path(cx)?, handle.enclosing_line_range(cx), cx);
|
||||
fn pending_symbol(
|
||||
handle: SymbolContextHandle,
|
||||
path_style: PathStyle,
|
||||
cx: &App,
|
||||
) -> Option<AddedContext> {
|
||||
let excerpt = ContextFileExcerpt::new(
|
||||
&handle.full_path(cx)?.to_string_lossy(),
|
||||
handle.enclosing_line_range(cx),
|
||||
path_style,
|
||||
cx,
|
||||
);
|
||||
Some(AddedContext {
|
||||
kind: ContextKind::Symbol,
|
||||
name: handle.symbol.clone(),
|
||||
@@ -383,8 +418,17 @@ impl AddedContext {
|
||||
})
|
||||
}
|
||||
|
||||
fn pending_selection(handle: SelectionContextHandle, cx: &App) -> Option<AddedContext> {
|
||||
let excerpt = ContextFileExcerpt::new(&handle.full_path(cx)?, handle.line_range(cx), cx);
|
||||
fn pending_selection(
|
||||
handle: SelectionContextHandle,
|
||||
path_style: PathStyle,
|
||||
cx: &App,
|
||||
) -> Option<AddedContext> {
|
||||
let excerpt = ContextFileExcerpt::new(
|
||||
&handle.full_path(cx)?.to_string_lossy(),
|
||||
handle.line_range(cx),
|
||||
path_style,
|
||||
cx,
|
||||
);
|
||||
Some(AddedContext {
|
||||
kind: ContextKind::Selection,
|
||||
name: excerpt.file_name_and_range.clone(),
|
||||
@@ -485,13 +529,13 @@ impl AddedContext {
|
||||
fn image(
|
||||
context: ImageContext,
|
||||
model: Option<&Arc<dyn language_model::LanguageModel>>,
|
||||
path_style: PathStyle,
|
||||
cx: &App,
|
||||
) -> AddedContext {
|
||||
let (name, parent, icon_path) = if let Some(full_path) = context.full_path.as_ref() {
|
||||
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
|
||||
let (name, parent) =
|
||||
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
|
||||
let icon_path = FileIcons::get_icon(full_path, cx);
|
||||
extract_file_name_and_directory_from_full_path(full_path, path_style);
|
||||
let icon_path = FileIcons::get_icon(Path::new(full_path), cx);
|
||||
(name, parent, icon_path)
|
||||
} else {
|
||||
("Image".into(), None, None)
|
||||
@@ -540,19 +584,20 @@ impl AddedContext {
}

fn extract_file_name_and_directory_from_full_path(
path: &Path,
name_fallback: &SharedString,
path: &str,
path_style: PathStyle,
) -> (SharedString, Option<SharedString>) {
let name = path
.file_name()
.map(|n| n.to_string_lossy().into_owned().into())
.unwrap_or_else(|| name_fallback.clone());
let parent = path
.parent()
.and_then(|p| p.file_name())
.map(|n| n.to_string_lossy().into_owned().into());

(name, parent)
let (parent, file_name) = path_style.split(path);
let parent = parent.and_then(|parent| {
let parent = parent.trim_end_matches(path_style.separator());
let (_, parent) = path_style.split(parent);
if parent.is_empty() {
None
} else {
Some(SharedString::new(parent))
}
});
(SharedString::new(file_name), parent)
}
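Both versions above reduce a full path to a display name plus only the immediate parent directory's name, not the whole prefix. A sketch of that reduction over a plain `/`-separated string, standing in for the `PathStyle::split` calls used in the hunk; `name_and_parent` is a hypothetical helper:

```rust
/// "projects/zed/src/main.rs" -> ("main.rs", Some("src"))
/// "main.rs"                  -> ("main.rs", None)
fn name_and_parent(path: &str) -> (&str, Option<&str>) {
    let mut components = path.rsplit('/').filter(|component| !component.is_empty());
    let name = components.next().unwrap_or(path);
    (name, components.next())
}

fn main() {
    assert_eq!(name_and_parent("projects/zed/src/main.rs"), ("main.rs", Some("src")));
    assert_eq!(name_and_parent("main.rs"), ("main.rs", None));
}
```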
#[derive(Debug, Clone)]
|
||||
@@ -564,25 +609,25 @@ struct ContextFileExcerpt {
|
||||
}
|
||||
|
||||
impl ContextFileExcerpt {
|
||||
pub fn new(full_path: &Path, line_range: Range<Point>, cx: &App) -> Self {
|
||||
let full_path_string = full_path.to_string_lossy().into_owned();
|
||||
let file_name = full_path
|
||||
.file_name()
|
||||
.map(|n| n.to_string_lossy().into_owned())
|
||||
.unwrap_or_else(|| full_path_string.clone());
|
||||
|
||||
pub fn new(full_path: &str, line_range: Range<Point>, path_style: PathStyle, cx: &App) -> Self {
|
||||
let (parent, file_name) = path_style.split(full_path);
|
||||
let line_range_text = format!(" ({}-{})", line_range.start.row + 1, line_range.end.row + 1);
|
||||
let mut full_path_and_range = full_path_string;
|
||||
let mut full_path_and_range = full_path.to_owned();
|
||||
full_path_and_range.push_str(&line_range_text);
|
||||
let mut file_name_and_range = file_name;
|
||||
let mut file_name_and_range = file_name.to_owned();
|
||||
file_name_and_range.push_str(&line_range_text);
|
||||
|
||||
let parent_name = full_path
|
||||
.parent()
|
||||
.and_then(|p| p.file_name())
|
||||
.map(|n| n.to_string_lossy().into_owned().into());
|
||||
let parent_name = parent.and_then(|parent| {
|
||||
let parent = parent.trim_end_matches(path_style.separator());
|
||||
let (_, parent) = path_style.split(parent);
|
||||
if parent.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(SharedString::new(parent))
|
||||
}
|
||||
});
|
||||
|
||||
let icon_path = FileIcons::get_icon(full_path, cx);
|
||||
let icon_path = FileIcons::get_icon(Path::new(full_path), cx);
|
||||
|
||||
ContextFileExcerpt {
|
||||
file_name_and_range: file_name_and_range.into(),
|
||||
@@ -690,6 +735,7 @@ impl Component for AddedContext {
|
||||
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
|
||||
},
|
||||
None,
|
||||
PathStyle::local(),
|
||||
cx,
|
||||
),
|
||||
);
|
||||
@@ -710,6 +756,7 @@ impl Component for AddedContext {
|
||||
.shared(),
|
||||
},
|
||||
None,
|
||||
PathStyle::local(),
|
||||
cx,
|
||||
),
|
||||
);
|
||||
@@ -725,6 +772,7 @@ impl Component for AddedContext {
|
||||
image_task: Task::ready(None).shared(),
|
||||
},
|
||||
None,
|
||||
PathStyle::local(),
|
||||
cx,
|
||||
),
|
||||
);
|
||||
@@ -767,7 +815,8 @@ mod tests {
|
||||
full_path: None,
|
||||
};
|
||||
|
||||
let added_context = AddedContext::image(image_context, Some(&model), cx);
|
||||
let added_context =
|
||||
AddedContext::image(image_context, Some(&model), PathStyle::local(), cx);
|
||||
|
||||
assert!(matches!(
|
||||
added_context.status,
|
||||
@@ -790,7 +839,7 @@ mod tests {
|
||||
full_path: None,
|
||||
};
|
||||
|
||||
let added_context = AddedContext::image(image_context, None, cx);
|
||||
let added_context = AddedContext::image(image_context, None, PathStyle::local(), cx);
|
||||
|
||||
assert!(
|
||||
matches!(added_context.status, ContextStatus::Ready),
|
||||
|
||||
@@ -2,7 +2,7 @@ use std::sync::Arc;
|
||||
|
||||
use ai_onboarding::{AgentPanelOnboardingCard, PlanDefinitions};
|
||||
use client::zed_urls;
|
||||
use cloud_llm_client::{Plan, PlanV1};
|
||||
use cloud_llm_client::{Plan, PlanV2};
|
||||
use gpui::{AnyElement, App, IntoElement, RenderOnce, Window};
|
||||
use ui::{Divider, Tooltip, prelude::*};
|
||||
|
||||
@@ -112,7 +112,7 @@ impl Component for EndTrialUpsell {
|
||||
Some(
|
||||
v_flex()
|
||||
.child(EndTrialUpsell {
|
||||
plan: Plan::V1(PlanV1::ZedFree),
|
||||
plan: Plan::V2(PlanV2::ZedFree),
|
||||
dismiss_upsell: Arc::new(|_, _| {}),
|
||||
})
|
||||
.into_any_element(),
|
||||
|
||||
@@ -40,7 +40,7 @@ impl AgentOnboardingModal {
|
||||
}
|
||||
|
||||
fn view_blog(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
|
||||
cx.open_url("http://zed.dev/blog/fastest-ai-code-editor");
|
||||
cx.open_url("https://zed.dev/blog/fastest-ai-code-editor");
|
||||
cx.notify();
|
||||
|
||||
agent_onboarding_event!("Blog Link Clicked");
|
||||
|
||||
@@ -18,7 +18,6 @@ default = []
|
||||
client.workspace = true
|
||||
cloud_llm_client.workspace = true
|
||||
component.workspace = true
|
||||
feature_flags.workspace = true
|
||||
gpui.workspace = true
|
||||
language_model.workspace = true
|
||||
serde.workspace = true
|
||||
|
||||
@@ -18,7 +18,6 @@ pub use young_account_banner::YoungAccountBanner;
|
||||
use std::sync::Arc;
|
||||
|
||||
use client::{Client, UserStore, zed_urls};
|
||||
use feature_flags::{BillingV2FeatureFlag, FeatureFlagAppExt as _};
|
||||
use gpui::{AnyElement, Entity, IntoElement, ParentElement};
|
||||
use ui::{Divider, RegisterComponent, Tooltip, prelude::*};
|
||||
|
||||
@@ -85,7 +84,7 @@ impl ZedAiOnboarding {
|
||||
self
|
||||
}
|
||||
|
||||
fn render_sign_in_disclaimer(&self, cx: &mut App) -> AnyElement {
|
||||
fn render_sign_in_disclaimer(&self, _cx: &mut App) -> AnyElement {
|
||||
let signing_in = matches!(self.sign_in_status, SignInStatus::SigningIn);
|
||||
|
||||
v_flex()
|
||||
@@ -96,7 +95,7 @@ impl ZedAiOnboarding {
|
||||
.color(Color::Muted)
|
||||
.mb_2(),
|
||||
)
|
||||
.child(PlanDefinitions.pro_plan(cx.has_flag::<BillingV2FeatureFlag>(), false))
|
||||
.child(PlanDefinitions.pro_plan(true, false))
|
||||
.child(
|
||||
Button::new("sign_in", "Try Zed Pro for Free")
|
||||
.disabled(signing_in)
|
||||
@@ -307,7 +306,7 @@ impl RenderOnce for ZedAiOnboarding {
|
||||
fn render(self, _window: &mut ui::Window, cx: &mut App) -> impl IntoElement {
|
||||
if matches!(self.sign_in_status, SignInStatus::SignedIn) {
|
||||
match self.plan {
|
||||
None => self.render_free_plan_state(cx.has_flag::<BillingV2FeatureFlag>(), cx),
|
||||
None => self.render_free_plan_state(true, cx),
|
||||
Some(plan @ (Plan::V1(PlanV1::ZedFree) | Plan::V2(PlanV2::ZedFree))) => {
|
||||
self.render_free_plan_state(plan.is_v2(), cx)
|
||||
}
|
||||
@@ -372,7 +371,7 @@ impl Component for ZedAiOnboarding {
|
||||
"Free Plan",
|
||||
onboarding(
|
||||
SignInStatus::SignedIn,
|
||||
Some(Plan::V1(PlanV1::ZedFree)),
|
||||
Some(Plan::V2(PlanV2::ZedFree)),
|
||||
false,
|
||||
),
|
||||
),
|
||||
@@ -380,7 +379,7 @@ impl Component for ZedAiOnboarding {
|
||||
"Pro Trial",
|
||||
onboarding(
|
||||
SignInStatus::SignedIn,
|
||||
Some(Plan::V1(PlanV1::ZedProTrial)),
|
||||
Some(Plan::V2(PlanV2::ZedProTrial)),
|
||||
false,
|
||||
),
|
||||
),
|
||||
@@ -388,7 +387,7 @@ impl Component for ZedAiOnboarding {
|
||||
"Pro Plan",
|
||||
onboarding(
|
||||
SignInStatus::SignedIn,
|
||||
Some(Plan::V1(PlanV1::ZedPro)),
|
||||
Some(Plan::V2(PlanV2::ZedPro)),
|
||||
false,
|
||||
),
|
||||
),
|
||||
|
||||
@@ -2,7 +2,6 @@ use std::sync::Arc;
|
||||
|
||||
use client::{Client, UserStore, zed_urls};
|
||||
use cloud_llm_client::{Plan, PlanV1, PlanV2};
|
||||
use feature_flags::{BillingV2FeatureFlag, FeatureFlagAppExt};
|
||||
use gpui::{AnyElement, App, Entity, IntoElement, RenderOnce, Window};
|
||||
use ui::{CommonAnimationExt, Divider, Vector, VectorName, prelude::*};
|
||||
|
||||
@@ -50,9 +49,7 @@ impl AiUpsellCard {
|
||||
|
||||
impl RenderOnce for AiUpsellCard {
|
||||
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
|
||||
let is_v2_plan = self
|
||||
.user_plan
|
||||
.map_or(cx.has_flag::<BillingV2FeatureFlag>(), |plan| plan.is_v2());
|
||||
let is_v2_plan = self.user_plan.map_or(true, |plan| plan.is_v2());
|
||||
|
||||
let pro_section = v_flex()
|
||||
.flex_grow()
|
||||
@@ -215,7 +212,7 @@ impl RenderOnce for AiUpsellCard {
|
||||
.child(
|
||||
footer_container
|
||||
.child(
|
||||
Button::new("start_trial", "Start 14-day Free Pro Trial")
|
||||
Button::new("start_trial", "Start Pro Trial")
|
||||
.full_width()
|
||||
.style(ButtonStyle::Tinted(ui::TintColor::Accent))
|
||||
.when_some(self.tab_index, |this, tab_index| {
|
||||
@@ -230,7 +227,7 @@ impl RenderOnce for AiUpsellCard {
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Label::new("No credit card required")
|
||||
Label::new("14 days, no credit card required")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
@@ -327,7 +324,7 @@ impl Component for AiUpsellCard {
|
||||
sign_in_status: SignInStatus::SignedIn,
|
||||
sign_in: Arc::new(|_, _| {}),
|
||||
account_too_young: false,
|
||||
user_plan: Some(Plan::V1(PlanV1::ZedFree)),
|
||||
user_plan: Some(Plan::V2(PlanV2::ZedFree)),
|
||||
tab_index: Some(1),
|
||||
}
|
||||
.into_any_element(),
|
||||
@@ -338,7 +335,7 @@ impl Component for AiUpsellCard {
|
||||
sign_in_status: SignInStatus::SignedIn,
|
||||
sign_in: Arc::new(|_, _| {}),
|
||||
account_too_young: true,
|
||||
user_plan: Some(Plan::V1(PlanV1::ZedFree)),
|
||||
user_plan: Some(Plan::V2(PlanV2::ZedFree)),
|
||||
tab_index: Some(1),
|
||||
}
|
||||
.into_any_element(),
|
||||
@@ -349,7 +346,7 @@ impl Component for AiUpsellCard {
|
||||
sign_in_status: SignInStatus::SignedIn,
|
||||
sign_in: Arc::new(|_, _| {}),
|
||||
account_too_young: false,
|
||||
user_plan: Some(Plan::V1(PlanV1::ZedProTrial)),
|
||||
user_plan: Some(Plan::V2(PlanV2::ZedProTrial)),
|
||||
tab_index: Some(1),
|
||||
}
|
||||
.into_any_element(),
|
||||
@@ -360,7 +357,7 @@ impl Component for AiUpsellCard {
|
||||
sign_in_status: SignInStatus::SignedIn,
|
||||
sign_in: Arc::new(|_, _| {}),
|
||||
account_too_young: false,
|
||||
user_plan: Some(Plan::V1(PlanV1::ZedPro)),
|
||||
user_plan: Some(Plan::V2(PlanV2::ZedPro)),
|
||||
tab_index: Some(1),
|
||||
}
|
||||
.into_any_element(),
|
||||
|
||||
@@ -7,33 +7,62 @@ pub struct PlanDefinitions;
|
||||
impl PlanDefinitions {
|
||||
pub const AI_DESCRIPTION: &'static str = "Zed offers a complete agentic experience, with robust editing and reviewing features to collaborate with AI.";
|
||||
|
||||
pub fn free_plan(&self, _is_v2: bool) -> impl IntoElement {
|
||||
List::new()
|
||||
.child(ListBulletItem::new("50 prompts with Claude models"))
|
||||
.child(ListBulletItem::new("2,000 accepted edit predictions"))
|
||||
}
|
||||
|
||||
pub fn pro_trial(&self, _is_v2: bool, period: bool) -> impl IntoElement {
|
||||
List::new()
|
||||
.child(ListBulletItem::new("150 prompts with Claude models"))
|
||||
.child(ListBulletItem::new(
|
||||
"Unlimited edit predictions with Zeta, our open-source model",
|
||||
))
|
||||
.when(period, |this| {
|
||||
this.child(ListBulletItem::new(
|
||||
"Try it out for 14 days for free, no credit card required",
|
||||
pub fn free_plan(&self, is_v2: bool) -> impl IntoElement {
|
||||
if is_v2 {
|
||||
List::new()
|
||||
.child(ListBulletItem::new("2,000 accepted edit predictions"))
|
||||
.child(ListBulletItem::new(
|
||||
"Unlimited prompts with your AI API keys",
|
||||
))
|
||||
})
|
||||
.child(ListBulletItem::new(
|
||||
"Unlimited use of external agents like Claude Code",
|
||||
))
|
||||
} else {
|
||||
List::new()
|
||||
.child(ListBulletItem::new("50 prompts with Claude models"))
|
||||
.child(ListBulletItem::new("2,000 accepted edit predictions"))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn pro_plan(&self, _is_v2: bool, price: bool) -> impl IntoElement {
|
||||
List::new()
|
||||
.child(ListBulletItem::new("500 prompts with Claude models"))
|
||||
.child(ListBulletItem::new(
|
||||
"Unlimited edit predictions with Zeta, our open-source model",
|
||||
))
|
||||
.when(price, |this| {
|
||||
this.child(ListBulletItem::new("$20 USD per month"))
|
||||
})
|
||||
pub fn pro_trial(&self, is_v2: bool, period: bool) -> impl IntoElement {
|
||||
if is_v2 {
|
||||
List::new()
|
||||
.child(ListBulletItem::new("Unlimited edit predictions"))
|
||||
.child(ListBulletItem::new("$20 of tokens"))
|
||||
.when(period, |this| {
|
||||
this.child(ListBulletItem::new(
|
||||
"Try it out for 14 days, no credit card required",
|
||||
))
|
||||
})
|
||||
} else {
|
||||
List::new()
|
||||
.child(ListBulletItem::new("150 prompts with Claude models"))
|
||||
.child(ListBulletItem::new(
|
||||
"Unlimited edit predictions with Zeta, our open-source model",
|
||||
))
|
||||
.when(period, |this| {
|
||||
this.child(ListBulletItem::new(
|
||||
"Try it out for 14 days, no credit card required",
|
||||
))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn pro_plan(&self, is_v2: bool, price: bool) -> impl IntoElement {
|
||||
if is_v2 {
|
||||
List::new()
|
||||
.child(ListBulletItem::new("Unlimited edit predictions"))
|
||||
.child(ListBulletItem::new("$5 of tokens"))
|
||||
.child(ListBulletItem::new("Usage-based billing beyond $5"))
|
||||
} else {
|
||||
List::new()
|
||||
.child(ListBulletItem::new("500 prompts with Claude models"))
|
||||
.child(ListBulletItem::new(
|
||||
"Unlimited edit predictions with Zeta, our open-source model",
|
||||
))
|
||||
.when(price, |this| {
|
||||
this.child(ListBulletItem::new("$20 USD per month"))
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ pub struct YoungAccountBanner;
|
||||
|
||||
impl RenderOnce for YoungAccountBanner {
|
||||
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
|
||||
const YOUNG_ACCOUNT_DISCLAIMER: &str = "To prevent abuse of our service, GitHub accounts created fewer than 30 days ago are not eligible for free plan usage or Pro plan free trial. To request an exception, reach out to billing-support@zed.dev.";
|
||||
const YOUNG_ACCOUNT_DISCLAIMER: &str = "To prevent abuse of our service, GitHub accounts created fewer than 30 days ago are not eligible for the Pro trial. You can request an exception by reaching out to billing-support@zed.dev";
|
||||
|
||||
let label = div()
|
||||
.w_full()
|
||||
|
||||
@@ -67,7 +67,6 @@ pub enum Model {
|
||||
alias = "claude-opus-4-1-thinking-latest"
|
||||
)]
|
||||
ClaudeOpus4_1Thinking,
|
||||
#[default]
|
||||
#[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")]
|
||||
ClaudeSonnet4,
|
||||
#[serde(
|
||||
@@ -75,6 +74,14 @@ pub enum Model {
|
||||
alias = "claude-sonnet-4-thinking-latest"
|
||||
)]
|
||||
ClaudeSonnet4Thinking,
|
||||
#[default]
|
||||
#[serde(rename = "claude-sonnet-4-5", alias = "claude-sonnet-4-5-latest")]
|
||||
ClaudeSonnet4_5,
|
||||
#[serde(
|
||||
rename = "claude-sonnet-4-5-thinking",
|
||||
alias = "claude-sonnet-4-5-thinking-latest"
|
||||
)]
|
||||
ClaudeSonnet4_5Thinking,
|
||||
#[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
|
||||
Claude3_7Sonnet,
|
||||
#[serde(
|
||||
@@ -133,6 +140,14 @@ impl Model {
|
||||
return Ok(Self::ClaudeOpus4);
|
||||
}
|
||||
|
||||
if id.starts_with("claude-sonnet-4-5-thinking") {
|
||||
return Ok(Self::ClaudeSonnet4_5Thinking);
|
||||
}
|
||||
|
||||
if id.starts_with("claude-sonnet-4-5") {
|
||||
return Ok(Self::ClaudeSonnet4_5);
|
||||
}
|
||||
|
||||
if id.starts_with("claude-sonnet-4-thinking") {
|
||||
return Ok(Self::ClaudeSonnet4Thinking);
|
||||
}
|
||||
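Worth noting about the ordering of the new checks above: `Model::from_id` matches by string prefix, and every Sonnet 4.5 id also begins with `claude-sonnet-4`, so the 4.5 branches have to be tested before the plain Sonnet 4 ones. A minimal, self-contained illustration of that prefix overlap:

fn main() {
    let id = "claude-sonnet-4-5-thinking-latest";
    // Both prefixes match, so the more specific check has to run first,
    // otherwise this id would resolve to the older Sonnet 4 variants.
    assert!(id.starts_with("claude-sonnet-4"));
    assert!(id.starts_with("claude-sonnet-4-5-thinking"));
}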
@@ -180,6 +195,8 @@ impl Model {
|
||||
Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-thinking-latest",
|
||||
Self::ClaudeSonnet4 => "claude-sonnet-4-latest",
|
||||
Self::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking-latest",
|
||||
Self::ClaudeSonnet4_5 => "claude-sonnet-4-5-latest",
|
||||
Self::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-thinking-latest",
|
||||
Self::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Self::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
|
||||
Self::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
|
||||
@@ -197,6 +214,7 @@ impl Model {
|
||||
Self::ClaudeOpus4 | Self::ClaudeOpus4Thinking => "claude-opus-4-20250514",
|
||||
Self::ClaudeOpus4_1 | Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-20250805",
|
||||
Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => "claude-sonnet-4-20250514",
|
||||
Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-20250929",
|
||||
Self::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
|
||||
Self::Claude3_5Haiku => "claude-3-5-haiku-latest",
|
||||
@@ -215,6 +233,8 @@ impl Model {
|
||||
Self::ClaudeOpus4_1Thinking => "Claude Opus 4.1 Thinking",
|
||||
Self::ClaudeSonnet4 => "Claude Sonnet 4",
|
||||
Self::ClaudeSonnet4Thinking => "Claude Sonnet 4 Thinking",
|
||||
Self::ClaudeSonnet4_5 => "Claude Sonnet 4.5",
|
||||
Self::ClaudeSonnet4_5Thinking => "Claude Sonnet 4.5 Thinking",
|
||||
Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
|
||||
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
|
||||
Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
|
||||
@@ -236,6 +256,8 @@ impl Model {
|
||||
| Self::ClaudeOpus4_1Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::Claude3_7Sonnet
|
||||
@@ -261,6 +283,8 @@ impl Model {
|
||||
| Self::ClaudeOpus4_1Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::Claude3_7Sonnet
|
||||
@@ -280,6 +304,8 @@ impl Model {
|
||||
| Self::ClaudeOpus4_1Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
@@ -299,6 +325,8 @@ impl Model {
|
||||
| Self::ClaudeOpus4_1Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
@@ -318,6 +346,7 @@ impl Model {
|
||||
Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4_1
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
@@ -327,6 +356,7 @@ impl Model {
|
||||
Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeOpus4_1Thinking
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking {
|
||||
budget_tokens: Some(4_096),
|
||||
},
|
||||
|
||||
@@ -16,8 +16,12 @@ anyhow.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
net.workspace = true
|
||||
parking_lot.workspace = true
|
||||
proto.workspace = true
|
||||
smol.workspace = true
|
||||
tempfile.workspace = true
|
||||
util.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
zeroize.workspace = true
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
windows.workspace = true
|
||||
|
||||
@@ -1,3 +1,9 @@
|
||||
mod encrypted_password;
|
||||
|
||||
pub use encrypted_password::{EncryptedPassword, ProcessExt};
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
use std::sync::OnceLock;
|
||||
use std::{ffi::OsStr, time::Duration};
|
||||
|
||||
use anyhow::{Context as _, Result};
|
||||
@@ -10,6 +16,8 @@ use gpui::{AsyncApp, BackgroundExecutor, Task};
|
||||
use smol::fs;
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::encrypted_password::decrypt;
|
||||
|
||||
#[derive(PartialEq, Eq)]
|
||||
pub enum AskPassResult {
|
||||
CancelledByUser,
|
||||
@@ -17,16 +25,19 @@ pub enum AskPassResult {
|
||||
}
|
||||
|
||||
pub struct AskPassDelegate {
|
||||
tx: mpsc::UnboundedSender<(String, oneshot::Sender<String>)>,
|
||||
tx: mpsc::UnboundedSender<(String, oneshot::Sender<EncryptedPassword>)>,
|
||||
_task: Task<()>,
|
||||
}
|
||||
|
||||
impl AskPassDelegate {
|
||||
pub fn new(
|
||||
cx: &mut AsyncApp,
|
||||
password_prompt: impl Fn(String, oneshot::Sender<String>, &mut AsyncApp) + Send + Sync + 'static,
|
||||
password_prompt: impl Fn(String, oneshot::Sender<EncryptedPassword>, &mut AsyncApp)
|
||||
+ Send
|
||||
+ Sync
|
||||
+ 'static,
|
||||
) -> Self {
|
||||
let (tx, mut rx) = mpsc::unbounded::<(String, oneshot::Sender<String>)>();
|
||||
let (tx, mut rx) = mpsc::unbounded::<(String, oneshot::Sender<_>)>();
|
||||
let task = cx.spawn(async move |cx: &mut AsyncApp| {
|
||||
while let Some((prompt, channel)) = rx.next().await {
|
||||
password_prompt(prompt, channel, cx);
|
||||
@@ -35,7 +46,7 @@ impl AskPassDelegate {
|
||||
Self { tx, _task: task }
|
||||
}
|
||||
|
||||
pub async fn ask_password(&mut self, prompt: String) -> Result<String> {
|
||||
pub async fn ask_password(&mut self, prompt: String) -> Result<EncryptedPassword> {
|
||||
let (tx, rx) = oneshot::channel();
|
||||
self.tx.send((prompt, tx)).await?;
|
||||
Ok(rx.await?)
|
||||
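With the change above, the delegate's prompt callback hands back an `EncryptedPassword` instead of a plain `String`. A hedged sketch of what a caller could look like under the new signature (the UI helper is hypothetical; only the types shown in this diff are assumed):

use askpass::{AskPassDelegate, EncryptedPassword};
use gpui::AsyncApp;

// Hypothetical stand-in for whatever UI actually collects the secret.
fn read_password_from_ui(_prompt: &str) -> String {
    String::new()
}

fn make_delegate(cx: &mut AsyncApp) -> AskPassDelegate {
    AskPassDelegate::new(cx, |prompt, tx, _cx| {
        // The callback now sends an EncryptedPassword over the oneshot channel.
        if let Ok(secret) = EncryptedPassword::try_from(read_password_from_ui(&prompt).as_str()) {
            tx.send(secret).ok();
        }
    })
}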
@@ -48,7 +59,7 @@ pub struct AskPassSession {
|
||||
#[cfg(target_os = "windows")]
|
||||
askpass_helper: String,
|
||||
#[cfg(target_os = "windows")]
|
||||
secret: std::sync::Arc<parking_lot::Mutex<String>>,
|
||||
secret: std::sync::Arc<OnceLock<EncryptedPassword>>,
|
||||
_askpass_task: Task<()>,
|
||||
askpass_opened_rx: Option<oneshot::Receiver<()>>,
|
||||
askpass_kill_master_rx: Option<oneshot::Receiver<()>>,
|
||||
@@ -68,7 +79,7 @@ impl AskPassSession {
|
||||
use util::fs::make_file_executable;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
let secret = std::sync::Arc::new(parking_lot::Mutex::new(String::new()));
|
||||
let secret = std::sync::Arc::new(OnceLock::new());
|
||||
let temp_dir = tempfile::Builder::new().prefix("zed-askpass").tempdir()?;
|
||||
let askpass_socket = temp_dir.path().join("askpass.sock");
|
||||
let askpass_script_path = temp_dir.path().join(ASKPASS_SCRIPT_NAME);
|
||||
@@ -104,10 +115,12 @@ impl AskPassSession {
|
||||
.context("getting askpass password")
|
||||
.log_err()
|
||||
{
|
||||
stream.write_all(password.as_bytes()).await.log_err();
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
*askpass_secret.lock() = password;
|
||||
askpass_secret.get_or_init(|| password.clone());
|
||||
}
|
||||
if let Ok(decrypted) = decrypt(password) {
|
||||
stream.write_all(decrypted.as_bytes()).await.log_err();
|
||||
}
|
||||
} else {
|
||||
if let Some(kill_tx) = kill_tx.take() {
|
||||
@@ -188,8 +201,8 @@ impl AskPassSession {
|
||||
|
||||
/// This will return the password that was last set by the askpass script.
|
||||
#[cfg(target_os = "windows")]
|
||||
pub fn get_password(&self) -> String {
|
||||
self.secret.lock().clone()
|
||||
pub fn get_password(&self) -> Option<EncryptedPassword> {
|
||||
self.secret.get().cloned()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
crates/askpass/src/encrypted_password.rs (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
//! This module provides [EncryptedPassword] for storage of passwords in memory.
//! On Windows that's implemented with CryptProtectMemory/CryptUnprotectMemory; on other platforms it just falls through
//! to string for now.
//!
//! The "safety" of this module lies in exploiting visibility rules of Rust:
//! 1. No outside module has access to the internal representation of [EncryptedPassword].
//! 2. [EncryptedPassword] cannot be converted into a [String] or any other plaintext representation.
//! All use cases that do need such functionality (of which we have two right now) are implemented within this module.
//!
//! Note that this is not bulletproof.
//! 1. [ProcessExt] is implemented for [smol::process::Command], which is a builder for smol processes.
//! Before the process itself is spawned, the contents of [EncryptedPassword] sit unencrypted in the env var storage of that builder.
//! 2. We're also sending plaintext passwords over RPC with [proto::AskPassResponse]. Go figure how great that is.
//!
//! Still, the goal of this module is to not have passwords lying around willy-nilly in memory.
//! We do not claim that it is fool-proof.
use anyhow::Result;
|
||||
use zeroize::Zeroize;
|
||||
|
||||
type LengthWithoutPadding = u32;
|
||||
#[derive(Clone)]
|
||||
pub struct EncryptedPassword(Vec<u8>, LengthWithoutPadding);
|
||||
|
||||
pub trait ProcessExt {
|
||||
fn encrypted_env(&mut self, name: &str, value: EncryptedPassword) -> &mut Self;
|
||||
}
|
||||
|
||||
impl ProcessExt for smol::process::Command {
|
||||
fn encrypted_env(&mut self, name: &str, value: EncryptedPassword) -> &mut Self {
|
||||
if let Ok(password) = decrypt(value) {
|
||||
self.env(name, password);
|
||||
}
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<EncryptedPassword> for proto::AskPassResponse {
|
||||
type Error = anyhow::Error;
|
||||
fn try_from(pw: EncryptedPassword) -> Result<Self, Self::Error> {
|
||||
let pw = decrypt(pw)?;
|
||||
Ok(Self { response: pw })
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for EncryptedPassword {
|
||||
fn drop(&mut self) {
|
||||
self.0.zeroize();
|
||||
self.1.zeroize();
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for EncryptedPassword {
|
||||
type Error = anyhow::Error;
|
||||
fn try_from(password: &str) -> Result<EncryptedPassword> {
|
||||
let len: u32 = password.len().try_into()?;
|
||||
#[cfg(windows)]
|
||||
{
|
||||
use windows::Win32::Security::Cryptography::{
|
||||
CRYPTPROTECTMEMORY_BLOCK_SIZE, CRYPTPROTECTMEMORY_SAME_PROCESS, CryptProtectMemory,
|
||||
};
|
||||
let mut value = password.bytes().collect::<Vec<_>>();
|
||||
let padded_length = len.next_multiple_of(CRYPTPROTECTMEMORY_BLOCK_SIZE);
|
||||
if padded_length != len {
|
||||
value.resize(padded_length as usize, 0);
|
||||
}
|
||||
if len != 0 {
|
||||
unsafe {
|
||||
CryptProtectMemory(
|
||||
value.as_mut_ptr() as _,
|
||||
len,
|
||||
CRYPTPROTECTMEMORY_SAME_PROCESS,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
Ok(Self(value, len))
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
Ok(Self(String::from(password).into(), len))
|
||||
}
|
||||
}
|
||||
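The padding in `try_from` above rounds the buffer up to the CryptProtectMemory block size before encrypting, and skips encryption entirely for empty input. A small illustration of that arithmetic, assuming the documented 16-byte block size:

fn main() {
    // CRYPTPROTECTMEMORY_BLOCK_SIZE is 16 bytes per the Win32 documentation (assumed here).
    const BLOCK: u32 = 16;
    assert_eq!(7u32.next_multiple_of(BLOCK), 16);  // short secrets get zero-padded up to one block
    assert_eq!(16u32.next_multiple_of(BLOCK), 16); // exact multiples need no extra padding
    assert_eq!(0u32.next_multiple_of(BLOCK), 0);   // empty input stays empty; `len == 0` also skips encryption
}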
|
||||
pub(crate) fn decrypt(mut password: EncryptedPassword) -> Result<String> {
|
||||
#[cfg(windows)]
|
||||
{
|
||||
use anyhow::Context;
|
||||
use windows::Win32::Security::Cryptography::{
|
||||
CRYPTPROTECTMEMORY_BLOCK_SIZE, CRYPTPROTECTMEMORY_SAME_PROCESS, CryptUnprotectMemory,
|
||||
};
|
||||
assert_eq!(
|
||||
password.0.len() % CRYPTPROTECTMEMORY_BLOCK_SIZE as usize,
|
||||
0,
|
||||
"Violated pre-condition (buffer size <{}> must be a multiple of CRYPTPROTECTMEMORY_BLOCK_SIZE <{}>) for CryptUnprotectMemory.",
|
||||
password.0.len(),
|
||||
CRYPTPROTECTMEMORY_BLOCK_SIZE
|
||||
);
|
||||
if password.1 != 0 {
|
||||
unsafe {
|
||||
CryptUnprotectMemory(
|
||||
password.0.as_mut_ptr() as _,
|
||||
password.1,
|
||||
CRYPTPROTECTMEMORY_SAME_PROCESS,
|
||||
)
|
||||
.context("while decrypting a SSH password")?
|
||||
};
|
||||
|
||||
{
|
||||
// Remove padding
|
||||
_ = password.0.drain(password.1 as usize..);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(String::from_utf8(std::mem::take(&mut password.0))?)
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
Ok(String::from_utf8(std::mem::take(&mut password.0))?)
|
||||
}
|
||||
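Putting the new file together, a minimal usage sketch based only on the items exported above (`EncryptedPassword`, `ProcessExt`); the environment variable name is invented purely for illustration:

use askpass::{EncryptedPassword, ProcessExt};

fn ssh_command_with_password(password: &str) -> anyhow::Result<smol::process::Command> {
    // `TryFrom<&str>` encrypts the bytes on Windows and stores them as-is elsewhere.
    let secret = EncryptedPassword::try_from(password)?;
    let mut cmd = smol::process::Command::new("ssh");
    // `encrypted_env` decrypts only while setting the variable on the builder;
    // the variable name here is made up for this sketch.
    cmd.encrypted_env("ZED_ASKPASS_SECRET", secret);
    Ok(cmd)
}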
@@ -2445,7 +2445,7 @@ impl AssistantContext {
|
||||
.message_anchors
|
||||
.get(next_message_ix)
|
||||
.map_or(buffer.len(), |message| {
|
||||
buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
|
||||
buffer.clip_offset(message.start.to_previous_offset(buffer), Bias::Left)
|
||||
});
|
||||
Some(self.insert_message_at_offset(offset, role, status, cx))
|
||||
} else {
|
||||
@@ -2669,7 +2669,7 @@ impl AssistantContext {
|
||||
}
|
||||
|
||||
pub fn summarize(&mut self, mut replace_old: bool, cx: &mut Context<Self>) {
|
||||
let Some(model) = LanguageModelRegistry::read_global(cx).default_model() else {
|
||||
let Some(model) = LanguageModelRegistry::read_global(cx).thread_summary_model() else {
|
||||
return;
|
||||
};
|
||||
|
||||
|
||||
@@ -1329,13 +1329,12 @@ fn setup_context_editor_with_fake_model(
|
||||
cx.update(|cx| {
|
||||
init_test(cx);
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry.set_default_model(
|
||||
Some(ConfiguredModel {
|
||||
provider: fake_provider.clone(),
|
||||
model: fake_model.clone(),
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
let configured_model = ConfiguredModel {
|
||||
provider: fake_provider.clone(),
|
||||
model: fake_model.clone(),
|
||||
};
|
||||
registry.set_default_model(Some(configured_model.clone()), cx);
|
||||
registry.set_thread_summary_model(Some(configured_model), cx);
|
||||
})
|
||||
});
|
||||
|
||||
|
||||
@@ -25,6 +25,7 @@ parking_lot.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
ui.workspace = true
|
||||
util.workspace = true
|
||||
workspace.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
use std::path::PathBuf;
|
||||
use std::sync::{Arc, atomic::AtomicBool};
|
||||
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use extension::{Extension, ExtensionHostProxy, ExtensionSlashCommandProxy, WorktreeDelegate};
|
||||
use gpui::{App, Task, WeakEntity, Window};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use std::sync::{Arc, atomic::AtomicBool};
|
||||
use ui::prelude::*;
|
||||
use util::rel_path::RelPath;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::{
|
||||
@@ -51,10 +50,10 @@ impl WorktreeDelegate for WorktreeDelegateAdapter {
|
||||
}
|
||||
|
||||
fn root_path(&self) -> String {
|
||||
self.0.worktree_root_path().to_string_lossy().to_string()
|
||||
self.0.worktree_root_path().to_string_lossy().into_owned()
|
||||
}
|
||||
|
||||
async fn read_text_file(&self, path: PathBuf) -> Result<String> {
|
||||
async fn read_text_file(&self, path: &RelPath) -> Result<String> {
|
||||
self.0.read_text_file(path).await
|
||||
}
|
||||
|
||||
@@ -62,7 +61,7 @@ impl WorktreeDelegate for WorktreeDelegateAdapter {
|
||||
self.0
|
||||
.which(binary_name.as_ref())
|
||||
.await
|
||||
.map(|path| path.to_string_lossy().to_string())
|
||||
.map(|path| path.to_string_lossy().into_owned())
|
||||
}
|
||||
|
||||
async fn shell_env(&self) -> Vec<(String, String)> {
|
||||
|
||||
@@ -41,6 +41,9 @@ worktree.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
fs = { workspace = true, features = ["test-support"] }
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
pretty_assertions.workspace = true
|
||||
settings.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
settings = { workspace = true, features = ["test-support"] }
|
||||
zlog.workspace = true
|
||||
|
||||
crates/assistant_slash_commands/src/cargo_workspace_command.rs (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
SlashCommandResult,
|
||||
};
|
||||
use fs::Fs;
|
||||
use gpui::{App, Entity, Task, WeakEntity};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use project::{Project, ProjectPath};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
path::Path,
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
use ui::prelude::*;
|
||||
use util::rel_path::RelPath;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub struct CargoWorkspaceSlashCommand;
|
||||
|
||||
impl CargoWorkspaceSlashCommand {
|
||||
async fn build_message(fs: Arc<dyn Fs>, path_to_cargo_toml: &Path) -> Result<String> {
|
||||
let buffer = fs.load(path_to_cargo_toml).await?;
|
||||
let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?;
|
||||
|
||||
let mut message = String::new();
|
||||
writeln!(message, "You are in a Rust project.")?;
|
||||
|
||||
if let Some(workspace) = cargo_toml.workspace {
|
||||
writeln!(
|
||||
message,
|
||||
"The project is a Cargo workspace with the following members:"
|
||||
)?;
|
||||
for member in workspace.members {
|
||||
writeln!(message, "- {member}")?;
|
||||
}
|
||||
|
||||
if !workspace.default_members.is_empty() {
|
||||
writeln!(message, "The default members are:")?;
|
||||
for member in workspace.default_members {
|
||||
writeln!(message, "- {member}")?;
|
||||
}
|
||||
}
|
||||
|
||||
if !workspace.dependencies.is_empty() {
|
||||
writeln!(
|
||||
message,
|
||||
"The following workspace dependencies are installed:"
|
||||
)?;
|
||||
for dependency in workspace.dependencies.keys() {
|
||||
writeln!(message, "- {dependency}")?;
|
||||
}
|
||||
}
|
||||
} else if let Some(package) = cargo_toml.package {
|
||||
writeln!(
|
||||
message,
|
||||
"The project name is \"{name}\".",
|
||||
name = package.name
|
||||
)?;
|
||||
|
||||
let description = package
|
||||
.description
|
||||
.as_ref()
|
||||
.and_then(|description| description.get().ok().cloned());
|
||||
if let Some(description) = description.as_ref() {
|
||||
writeln!(message, "It describes itself as \"{description}\".")?;
|
||||
}
|
||||
|
||||
if !cargo_toml.dependencies.is_empty() {
|
||||
writeln!(message, "The following dependencies are installed:")?;
|
||||
for dependency in cargo_toml.dependencies.keys() {
|
||||
writeln!(message, "- {dependency}")?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(message)
|
||||
}
|
||||
|
||||
fn path_to_cargo_toml(project: Entity<Project>, cx: &mut App) -> Option<Arc<Path>> {
|
||||
let worktree = project.read(cx).worktrees(cx).next()?;
|
||||
let worktree = worktree.read(cx);
|
||||
let entry = worktree.entry_for_path(RelPath::new("Cargo.toml").unwrap())?;
|
||||
let path = ProjectPath {
|
||||
worktree_id: worktree.id(),
|
||||
path: entry.path.clone(),
|
||||
};
|
||||
Some(Arc::from(
|
||||
project.read(cx).absolute_path(&path, cx)?.as_path(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl SlashCommand for CargoWorkspaceSlashCommand {
|
||||
fn name(&self) -> String {
|
||||
"cargo-workspace".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"insert project workspace metadata".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
"Insert Project Workspace Metadata".into()
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakEntity<Workspace>>,
|
||||
_window: &mut Window,
|
||||
_cx: &mut App,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
_window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<SlashCommandResult> {
|
||||
let output = workspace.update(cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
let fs = workspace.project().read(cx).fs().clone();
|
||||
let path = Self::path_to_cargo_toml(project, cx);
|
||||
let output = cx.background_spawn(async move {
|
||||
let path = path.with_context(|| "Cargo.toml not found")?;
|
||||
Self::build_message(fs, &path).await
|
||||
});
|
||||
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let text = output.await?;
|
||||
let range = 0..text.len();
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections: vec![SlashCommandOutputSection {
|
||||
range,
|
||||
icon: IconName::FileTree,
|
||||
label: "Project".into(),
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
}
|
||||
.into_event_stream())
|
||||
})
|
||||
});
|
||||
output.unwrap_or_else(|error| Task::ready(Err(error)))
|
||||
}
|
||||
}
|
||||
@@ -13,12 +13,12 @@ use project::{DiagnosticSummary, PathMatchCandidateSet, Project};
|
||||
use rope::Point;
|
||||
use std::{
|
||||
fmt::Write,
|
||||
path::{Path, PathBuf},
|
||||
path::Path,
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
use ui::prelude::*;
|
||||
use util::ResultExt;
|
||||
use util::paths::PathMatcher;
|
||||
use util::paths::{PathMatcher, PathStyle};
|
||||
use util::{ResultExt, rel_path::RelPath};
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::create_label_for_command;
|
||||
@@ -36,7 +36,7 @@ impl DiagnosticsSlashCommand {
|
||||
if query.is_empty() {
|
||||
let workspace = workspace.read(cx);
|
||||
let entries = workspace.recent_navigation_history(Some(10), cx);
|
||||
let path_prefix: Arc<str> = Arc::default();
|
||||
let path_prefix: Arc<RelPath> = RelPath::empty().into();
|
||||
Task::ready(
|
||||
entries
|
||||
.into_iter()
|
||||
@@ -73,7 +73,7 @@ impl DiagnosticsSlashCommand {
|
||||
fuzzy::match_path_sets(
|
||||
candidate_sets.as_slice(),
|
||||
query.as_str(),
|
||||
None,
|
||||
&None,
|
||||
false,
|
||||
100,
|
||||
&cancellation_flag,
|
||||
@@ -125,6 +125,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
let Some(workspace) = workspace.and_then(|workspace| workspace.upgrade()) else {
|
||||
return Task::ready(Err(anyhow!("workspace was dropped")));
|
||||
};
|
||||
let path_style = workspace.read(cx).project().read(cx).path_style(cx);
|
||||
let query = arguments.last().cloned().unwrap_or_default();
|
||||
|
||||
let paths = self.search_paths(query.clone(), cancellation_flag.clone(), &workspace, cx);
|
||||
@@ -134,11 +135,11 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
.await
|
||||
.into_iter()
|
||||
.map(|path_match| {
|
||||
format!(
|
||||
"{}{}",
|
||||
path_match.path_prefix,
|
||||
path_match.path.to_string_lossy()
|
||||
)
|
||||
path_match
|
||||
.path_prefix
|
||||
.join(&path_match.path)
|
||||
.display(path_style)
|
||||
.to_string()
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -183,9 +184,11 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
return Task::ready(Err(anyhow!("workspace was dropped")));
|
||||
};
|
||||
|
||||
let options = Options::parse(arguments);
|
||||
let project = workspace.read(cx).project();
|
||||
let path_style = project.read(cx).path_style(cx);
|
||||
let options = Options::parse(arguments, path_style);
|
||||
|
||||
let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);
|
||||
let task = collect_diagnostics(project.clone(), options, cx);
|
||||
|
||||
window.spawn(cx, async move |_| {
|
||||
task.await?
|
||||
@@ -204,14 +207,14 @@ struct Options {
|
||||
const INCLUDE_WARNINGS_ARGUMENT: &str = "--include-warnings";
|
||||
|
||||
impl Options {
|
||||
fn parse(arguments: &[String]) -> Self {
|
||||
fn parse(arguments: &[String], path_style: PathStyle) -> Self {
|
||||
let mut include_warnings = false;
|
||||
let mut path_matcher = None;
|
||||
for arg in arguments {
|
||||
if arg == INCLUDE_WARNINGS_ARGUMENT {
|
||||
include_warnings = true;
|
||||
} else {
|
||||
path_matcher = PathMatcher::new(&[arg.to_owned()]).log_err();
|
||||
path_matcher = PathMatcher::new(&[arg.to_owned()], path_style).log_err();
|
||||
}
|
||||
}
|
||||
Self {
|
||||
@@ -237,21 +240,15 @@ fn collect_diagnostics(
|
||||
None
|
||||
};
|
||||
|
||||
let path_style = project.read(cx).path_style(cx);
|
||||
let glob_is_exact_file_match = if let Some(path) = options
|
||||
.path_matcher
|
||||
.as_ref()
|
||||
.and_then(|pm| pm.sources().first())
|
||||
{
|
||||
PathBuf::try_from(path)
|
||||
.ok()
|
||||
.and_then(|path| {
|
||||
project.read(cx).worktrees(cx).find_map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
let worktree_root_path = Path::new(worktree.root_name());
|
||||
let relative_path = path.strip_prefix(worktree_root_path).ok()?;
|
||||
worktree.absolutize(relative_path).ok()
|
||||
})
|
||||
})
|
||||
project
|
||||
.read(cx)
|
||||
.find_project_path(Path::new(path), cx)
|
||||
.is_some()
|
||||
} else {
|
||||
false
|
||||
@@ -263,9 +260,8 @@ fn collect_diagnostics(
|
||||
.diagnostic_summaries(false, cx)
|
||||
.flat_map(|(path, _, summary)| {
|
||||
let worktree = project.read(cx).worktree_for_id(path.worktree_id, cx)?;
|
||||
let mut path_buf = PathBuf::from(worktree.read(cx).root_name());
|
||||
path_buf.push(&path.path);
|
||||
Some((path, path_buf, summary))
|
||||
let full_path = worktree.read(cx).root_name().join(&path.path);
|
||||
Some((path, full_path, summary))
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -281,7 +277,7 @@ fn collect_diagnostics(
|
||||
let mut project_summary = DiagnosticSummary::default();
|
||||
for (project_path, path, summary) in diagnostic_summaries {
|
||||
if let Some(path_matcher) = &options.path_matcher
|
||||
&& !path_matcher.is_match(&path)
|
||||
&& !path_matcher.is_match(&path.as_std_path())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
@@ -294,7 +290,7 @@ fn collect_diagnostics(
|
||||
}
|
||||
|
||||
let last_end = output.text.len();
|
||||
let file_path = path.to_string_lossy().to_string();
|
||||
let file_path = path.display(path_style).to_string();
|
||||
if !glob_is_exact_file_match {
|
||||
writeln!(&mut output.text, "{file_path}").unwrap();
|
||||
}
|
||||
|
||||
@@ -14,11 +14,11 @@ use smol::stream::StreamExt;
|
||||
use std::{
|
||||
fmt::Write,
|
||||
ops::{Range, RangeInclusive},
|
||||
path::{Path, PathBuf},
|
||||
path::Path,
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
use ui::prelude::*;
|
||||
use util::ResultExt;
|
||||
use util::{ResultExt, rel_path::RelPath};
|
||||
use workspace::Workspace;
|
||||
use worktree::ChildEntriesOptions;
|
||||
|
||||
@@ -48,7 +48,7 @@ impl FileSlashCommand {
|
||||
include_dirs: true,
|
||||
include_ignored: false,
|
||||
};
|
||||
let entries = worktree.child_entries_with_options(Path::new(""), options);
|
||||
let entries = worktree.child_entries_with_options(RelPath::empty(), options);
|
||||
entries.map(move |entry| {
|
||||
(
|
||||
project::ProjectPath {
|
||||
@@ -61,19 +61,18 @@ impl FileSlashCommand {
|
||||
}))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let path_prefix: Arc<str> = Arc::default();
|
||||
let path_prefix: Arc<RelPath> = RelPath::empty().into();
|
||||
Task::ready(
|
||||
entries
|
||||
.into_iter()
|
||||
.filter_map(|(entry, is_dir)| {
|
||||
let worktree = project.worktree_for_id(entry.worktree_id, cx)?;
|
||||
let mut full_path = PathBuf::from(worktree.read(cx).root_name());
|
||||
full_path.push(&entry.path);
|
||||
let full_path = worktree.read(cx).root_name().join(&entry.path);
|
||||
Some(PathMatch {
|
||||
score: 0.,
|
||||
positions: Vec::new(),
|
||||
worktree_id: entry.worktree_id.to_usize(),
|
||||
path: full_path.into(),
|
||||
path: full_path,
|
||||
path_prefix: path_prefix.clone(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir,
|
||||
@@ -104,7 +103,7 @@ impl FileSlashCommand {
|
||||
fuzzy::match_path_sets(
|
||||
candidate_sets.as_slice(),
|
||||
query.as_str(),
|
||||
None,
|
||||
&None,
|
||||
false,
|
||||
100,
|
||||
&cancellation_flag,
|
||||
@@ -149,6 +148,8 @@ impl SlashCommand for FileSlashCommand {
|
||||
return Task::ready(Err(anyhow!("workspace was dropped")));
|
||||
};
|
||||
|
||||
let path_style = workspace.read(cx).path_style(cx);
|
||||
|
||||
let paths = self.search_paths(
|
||||
arguments.last().cloned().unwrap_or_default(),
|
||||
cancellation_flag,
|
||||
@@ -161,14 +162,14 @@ impl SlashCommand for FileSlashCommand {
|
||||
.await
|
||||
.into_iter()
|
||||
.filter_map(|path_match| {
|
||||
let text = format!(
|
||||
"{}{}",
|
||||
path_match.path_prefix,
|
||||
path_match.path.to_string_lossy()
|
||||
);
|
||||
let text = path_match
|
||||
.path_prefix
|
||||
.join(&path_match.path)
|
||||
.display(path_style)
|
||||
.to_string();
|
||||
|
||||
let mut label = CodeLabel::default();
|
||||
let file_name = path_match.path.file_name()?.to_string_lossy();
|
||||
let file_name = path_match.path.file_name()?;
|
||||
let label_text = if path_match.is_dir {
|
||||
format!("{}/ ", file_name)
|
||||
} else {
|
||||
@@ -247,14 +248,13 @@ fn collect_files(
|
||||
cx.spawn(async move |cx| {
|
||||
for snapshot in snapshots {
|
||||
let worktree_id = snapshot.id();
|
||||
let mut directory_stack: Vec<Arc<Path>> = Vec::new();
|
||||
let mut folded_directory_names_stack = Vec::new();
|
||||
let path_style = snapshot.path_style();
|
||||
let mut directory_stack: Vec<Arc<RelPath>> = Vec::new();
|
||||
let mut folded_directory_names: Arc<RelPath> = RelPath::empty().into();
|
||||
let mut is_top_level_directory = true;
|
||||
|
||||
for entry in snapshot.entries(false, 0) {
|
||||
let mut path_including_worktree_name = PathBuf::new();
|
||||
path_including_worktree_name.push(snapshot.root_name());
|
||||
path_including_worktree_name.push(&entry.path);
|
||||
let path_including_worktree_name = snapshot.root_name().join(&entry.path);
|
||||
|
||||
if !matchers
|
||||
.iter()
|
||||
@@ -277,13 +277,7 @@ fn collect_files(
|
||||
)))?;
|
||||
}
|
||||
|
||||
let filename = entry
|
||||
.path
|
||||
.file_name()
|
||||
.unwrap_or_default()
|
||||
.to_str()
|
||||
.unwrap_or_default()
|
||||
.to_string();
|
||||
let filename = entry.path.file_name().unwrap_or_default().to_string();
|
||||
|
||||
if entry.is_dir() {
|
||||
// Auto-fold directories that contain no files
|
||||
@@ -292,24 +286,23 @@ fn collect_files(
|
||||
if child_entries.next().is_none() && child.kind.is_dir() {
|
||||
if is_top_level_directory {
|
||||
is_top_level_directory = false;
|
||||
folded_directory_names_stack.push(
|
||||
path_including_worktree_name.to_string_lossy().to_string(),
|
||||
);
|
||||
folded_directory_names =
|
||||
folded_directory_names.join(&path_including_worktree_name);
|
||||
} else {
|
||||
folded_directory_names_stack.push(filename.to_string());
|
||||
folded_directory_names =
|
||||
folded_directory_names.join(RelPath::unix(&filename).unwrap());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
// Skip empty directories
|
||||
folded_directory_names_stack.clear();
|
||||
folded_directory_names = RelPath::empty().into();
|
||||
continue;
|
||||
}
|
||||
let prefix_paths = folded_directory_names_stack.drain(..).as_slice().join("/");
|
||||
if prefix_paths.is_empty() {
|
||||
if folded_directory_names.is_empty() {
|
||||
let label = if is_top_level_directory {
|
||||
is_top_level_directory = false;
|
||||
path_including_worktree_name.to_string_lossy().to_string()
|
||||
path_including_worktree_name.display(path_style).to_string()
|
||||
} else {
|
||||
filename
|
||||
};
|
||||
@@ -320,28 +313,23 @@ fn collect_files(
|
||||
}))?;
|
||||
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
|
||||
SlashCommandContent::Text {
|
||||
text: label,
|
||||
text: label.to_string(),
|
||||
run_commands_in_text: false,
|
||||
},
|
||||
)))?;
|
||||
directory_stack.push(entry.path.clone());
|
||||
} else {
|
||||
// todo(windows)
|
||||
// Potential bug: this assumes that the path separator is always `\` on Windows
|
||||
let entry_name = format!(
|
||||
"{}{}{}",
|
||||
prefix_paths,
|
||||
std::path::MAIN_SEPARATOR_STR,
|
||||
&filename
|
||||
);
|
||||
let entry_name =
|
||||
folded_directory_names.join(RelPath::unix(&filename).unwrap());
|
||||
let entry_name = entry_name.display(path_style);
|
||||
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
|
||||
icon: IconName::Folder,
|
||||
label: entry_name.clone().into(),
|
||||
label: entry_name.to_string().into(),
|
||||
metadata: None,
|
||||
}))?;
|
||||
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
|
||||
SlashCommandContent::Text {
|
||||
text: entry_name,
|
||||
text: entry_name.to_string(),
|
||||
run_commands_in_text: false,
|
||||
},
|
||||
)))?;
|
||||
@@ -356,7 +344,7 @@ fn collect_files(
|
||||
} else if entry.is_file() {
|
||||
let Some(open_buffer_task) = project_handle
|
||||
.update(cx, |project, cx| {
|
||||
project.open_buffer((worktree_id, &entry.path), cx)
|
||||
project.open_buffer((worktree_id, entry.path.clone()), cx)
|
||||
})
|
||||
.ok()
|
||||
else {
|
||||
@@ -367,7 +355,7 @@ fn collect_files(
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
|
||||
append_buffer_to_output(
|
||||
&snapshot,
|
||||
Some(&path_including_worktree_name),
|
||||
Some(path_including_worktree_name.display(path_style).as_ref()),
|
||||
&mut output,
|
||||
)
|
||||
.log_err();
|
||||
@@ -392,18 +380,18 @@ fn collect_files(
|
||||
}
|
||||
|
||||
pub fn codeblock_fence_for_path(
|
||||
path: Option<&Path>,
|
||||
path: Option<&str>,
|
||||
row_range: Option<RangeInclusive<u32>>,
|
||||
) -> String {
|
||||
let mut text = String::new();
|
||||
write!(text, "```").unwrap();
|
||||
|
||||
if let Some(path) = path {
|
||||
if let Some(extension) = path.extension().and_then(|ext| ext.to_str()) {
|
||||
if let Some(extension) = Path::new(path).extension().and_then(|ext| ext.to_str()) {
|
||||
write!(text, "{} ", extension).unwrap();
|
||||
}
|
||||
|
||||
write!(text, "{}", path.display()).unwrap();
|
||||
write!(text, "{path}").unwrap();
|
||||
} else {
|
||||
write!(text, "untitled").unwrap();
|
||||
}
|
||||
@@ -423,12 +411,12 @@ pub struct FileCommandMetadata {
|
||||
|
||||
pub fn build_entry_output_section(
|
||||
range: Range<usize>,
|
||||
path: Option<&Path>,
|
||||
path: Option<&str>,
|
||||
is_directory: bool,
|
||||
line_range: Option<Range<u32>>,
|
||||
) -> SlashCommandOutputSection<usize> {
|
||||
let mut label = if let Some(path) = path {
|
||||
path.to_string_lossy().to_string()
|
||||
path.to_string()
|
||||
} else {
|
||||
"untitled".to_string()
|
||||
};
|
||||
@@ -451,7 +439,7 @@ pub fn build_entry_output_section(
|
||||
} else {
|
||||
path.and_then(|path| {
|
||||
serde_json::to_value(FileCommandMetadata {
|
||||
path: path.to_string_lossy().to_string(),
|
||||
path: path.to_string(),
|
||||
})
|
||||
.ok()
|
||||
})
|
||||
@@ -462,10 +450,9 @@ pub fn build_entry_output_section(
|
||||
/// This contains a small fork of the util::paths::PathMatcher, that is stricter about the prefix
|
||||
/// check. Only subpaths pass the prefix check, rather than any prefix.
|
||||
mod custom_path_matcher {
|
||||
use std::{fmt::Debug as _, path::Path};
|
||||
|
||||
use globset::{Glob, GlobSet, GlobSetBuilder};
|
||||
use util::paths::SanitizedPath;
|
||||
use std::fmt::Debug as _;
|
||||
use util::{paths::SanitizedPath, rel_path::RelPath};
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct PathMatcher {
|
||||
@@ -492,12 +479,12 @@ mod custom_path_matcher {
|
||||
pub fn new(globs: &[String]) -> Result<Self, globset::Error> {
|
||||
let globs = globs
|
||||
.iter()
|
||||
.map(|glob| Glob::new(&SanitizedPath::new(glob).to_glob_string()))
|
||||
.map(|glob| Glob::new(&SanitizedPath::new(glob).to_string()))
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
let sources = globs.iter().map(|glob| glob.glob().to_owned()).collect();
|
||||
let sources_with_trailing_slash = globs
|
||||
.iter()
|
||||
.map(|glob| glob.glob().to_string() + std::path::MAIN_SEPARATOR_STR)
|
||||
.map(|glob| glob.glob().to_string() + "/")
|
||||
.collect();
|
||||
let mut glob_builder = GlobSetBuilder::new();
|
||||
for single_glob in globs {
|
||||
@@ -511,16 +498,13 @@ mod custom_path_matcher {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
|
||||
let other_path = other.as_ref();
|
||||
pub fn is_match(&self, other: &RelPath) -> bool {
|
||||
self.sources
|
||||
.iter()
|
||||
.zip(self.sources_with_trailing_slash.iter())
|
||||
.any(|(source, with_slash)| {
|
||||
let as_bytes = other_path.as_os_str().as_encoded_bytes();
|
||||
// todo(windows)
|
||||
// Potential bug: this assumes that the path separator is always `\` on Windows
|
||||
let with_slash = if source.ends_with(std::path::MAIN_SEPARATOR_STR) {
|
||||
let as_bytes = other.as_unix_str().as_bytes();
|
||||
let with_slash = if source.ends_with('/') {
|
||||
source.as_bytes()
|
||||
} else {
|
||||
with_slash.as_bytes()
|
||||
@@ -528,13 +512,13 @@ mod custom_path_matcher {
|
||||
|
||||
as_bytes.starts_with(with_slash) || as_bytes.ends_with(source.as_bytes())
|
||||
})
|
||||
|| self.glob.is_match(other_path)
|
||||
|| self.check_with_end_separator(other_path)
|
||||
|| self.glob.is_match(other.as_std_path())
|
||||
|| self.check_with_end_separator(other)
|
||||
}
|
||||
|
||||
fn check_with_end_separator(&self, path: &Path) -> bool {
|
||||
let path_str = path.to_string_lossy();
|
||||
let separator = std::path::MAIN_SEPARATOR_STR;
|
||||
fn check_with_end_separator(&self, path: &RelPath) -> bool {
|
||||
let path_str = path.as_unix_str();
|
||||
let separator = "/";
|
||||
if path_str.ends_with(separator) {
|
||||
false
|
||||
} else {
|
||||
@@ -546,7 +530,7 @@ mod custom_path_matcher {
|
||||
|
||||
pub fn append_buffer_to_output(
|
||||
buffer: &BufferSnapshot,
|
||||
path: Option<&Path>,
|
||||
path: Option<&str>,
|
||||
output: &mut SlashCommandOutput,
|
||||
) -> Result<()> {
|
||||
let prev_len = output.text.len();
|
||||
|
||||
@@ -137,7 +137,9 @@ pub fn selections_creases(
|
||||
None
|
||||
};
|
||||
let language_name = language_name.as_deref().unwrap_or("");
|
||||
let filename = snapshot.file_at(range.start).map(|file| file.full_path(cx));
|
||||
let filename = snapshot
|
||||
.file_at(range.start)
|
||||
.map(|file| file.full_path(cx).to_string_lossy().into_owned());
|
||||
let text = if language_name == "markdown" {
|
||||
selected_text
|
||||
.lines()
|
||||
@@ -187,9 +189,9 @@ pub fn selections_creases(
|
||||
let start_line = range.start.row + 1;
|
||||
let end_line = range.end.row + 1;
|
||||
if start_line == end_line {
|
||||
format!("{}, Line {}", path.display(), start_line)
|
||||
format!("{path}, Line {start_line}")
|
||||
} else {
|
||||
format!("{}, Lines {} to {}", path.display(), start_line, end_line)
|
||||
format!("{path}, Lines {start_line} to {end_line}")
|
||||
}
|
||||
} else {
|
||||
"Quoted selection".to_string()
|
||||
|
||||
@@ -7,8 +7,8 @@ use editor::Editor;
|
||||
use gpui::{AppContext as _, Task, WeakEntity};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use std::sync::Arc;
|
||||
use std::{path::Path, sync::atomic::AtomicBool};
|
||||
use ui::{App, IconName, Window};
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use ui::{App, IconName, SharedString, Window};
|
||||
use workspace::Workspace;
|
||||
|
||||
pub struct OutlineSlashCommand;
|
||||
@@ -67,13 +67,13 @@ impl SlashCommand for OutlineSlashCommand {
|
||||
};
|
||||
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let path = snapshot.resolve_file_path(cx, true);
|
||||
let path = snapshot.resolve_file_path(true, cx);
|
||||
|
||||
cx.background_spawn(async move {
|
||||
let outline = snapshot.outline(None);
|
||||
|
||||
let path = path.as_deref().unwrap_or(Path::new("untitled"));
|
||||
let mut outline_text = format!("Symbols for {}:\n", path.display());
|
||||
let path = path.as_deref().unwrap_or("untitled");
|
||||
let mut outline_text = format!("Symbols for {path}:\n");
|
||||
for item in &outline.path_candidates {
|
||||
outline_text.push_str("- ");
|
||||
outline_text.push_str(&item.string);
|
||||
@@ -84,7 +84,7 @@ impl SlashCommand for OutlineSlashCommand {
|
||||
sections: vec![SlashCommandOutputSection {
|
||||
range: 0..outline_text.len(),
|
||||
icon: IconName::ListTree,
|
||||
label: path.to_string_lossy().to_string().into(),
|
||||
label: SharedString::new(path),
|
||||
metadata: None,
|
||||
}],
|
||||
text: outline_text,
|
||||
|
||||
@@ -8,12 +8,9 @@ use editor::Editor;
|
||||
use futures::future::join_all;
|
||||
use gpui::{Task, WeakEntity};
|
||||
use language::{BufferSnapshot, CodeLabel, HighlightId, LspAdapterDelegate};
|
||||
use std::{
|
||||
path::PathBuf,
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
use std::sync::{Arc, atomic::AtomicBool};
|
||||
use ui::{ActiveTheme, App, Window, prelude::*};
|
||||
use util::ResultExt;
|
||||
use util::{ResultExt, paths::PathStyle};
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::file_command::append_buffer_to_output;
|
||||
@@ -72,35 +69,42 @@ impl SlashCommand for TabSlashCommand {
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
}
|
||||
|
||||
let active_item_path = workspace.as_ref().and_then(|workspace| {
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
let snapshot = active_item_buffer(workspace, cx).ok()?;
|
||||
snapshot.resolve_file_path(cx, true)
|
||||
})
|
||||
.ok()
|
||||
.flatten()
|
||||
let Some(workspace) = workspace.and_then(|workspace| workspace.upgrade()) else {
|
||||
return Task::ready(Err(anyhow::anyhow!("no workspace")));
|
||||
};
|
||||
|
||||
let active_item_path = workspace.update(cx, |workspace, cx| {
|
||||
let snapshot = active_item_buffer(workspace, cx).ok()?;
|
||||
snapshot.resolve_file_path(true, cx)
|
||||
});
|
||||
let path_style = workspace.read(cx).path_style(cx);
|
||||
|
||||
let current_query = arguments.last().cloned().unwrap_or_default();
|
||||
let tab_items_search =
|
||||
tab_items_for_queries(workspace, &[current_query], cancel, false, window, cx);
|
||||
let tab_items_search = tab_items_for_queries(
|
||||
Some(workspace.downgrade()),
|
||||
&[current_query],
|
||||
cancel,
|
||||
false,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
|
||||
window.spawn(cx, async move |_| {
|
||||
let tab_items = tab_items_search.await?;
|
||||
let run_command = tab_items.len() == 1;
|
||||
let tab_completion_items = tab_items.into_iter().filter_map(|(path, ..)| {
|
||||
let path_string = path.as_deref()?.to_string_lossy().to_string();
|
||||
if argument_set.contains(&path_string) {
|
||||
let path = path?;
|
||||
if argument_set.contains(&path) {
|
||||
return None;
|
||||
}
|
||||
if active_item_path.is_some() && active_item_path == path {
|
||||
if active_item_path.as_ref() == Some(&path) {
|
||||
return None;
|
||||
}
|
||||
let label = create_tab_completion_label(path.as_ref()?, comment_id);
|
||||
let label = create_tab_completion_label(&path, path_style, comment_id);
|
||||
Some(ArgumentCompletion {
|
||||
label,
|
||||
new_text: path_string,
|
||||
new_text: path,
|
||||
replace_previous_arguments: false,
|
||||
after_completion: run_command.into(),
|
||||
})
|
||||
@@ -109,8 +113,9 @@ impl SlashCommand for TabSlashCommand {
|
||||
let active_item_completion = active_item_path
|
||||
.as_deref()
|
||||
.map(|active_item_path| {
|
||||
let path_string = active_item_path.to_string_lossy().to_string();
|
||||
let label = create_tab_completion_label(active_item_path, comment_id);
|
||||
let path_string = active_item_path.to_string();
|
||||
let label =
|
||||
create_tab_completion_label(active_item_path, path_style, comment_id);
|
||||
ArgumentCompletion {
|
||||
label,
|
||||
new_text: path_string,
|
||||
@@ -169,7 +174,7 @@ fn tab_items_for_queries(
|
||||
strict_match: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<anyhow::Result<Vec<(Option<PathBuf>, BufferSnapshot, usize)>>> {
|
||||
) -> Task<anyhow::Result<Vec<(Option<String>, BufferSnapshot, usize)>>> {
|
||||
let empty_query = queries.is_empty() || queries.iter().all(|query| query.trim().is_empty());
|
||||
let queries = queries.to_owned();
|
||||
window.spawn(cx, async move |cx| {
|
||||
@@ -179,7 +184,7 @@ fn tab_items_for_queries(
|
||||
.update(cx, |workspace, cx| {
|
||||
if strict_match && empty_query {
|
||||
let snapshot = active_item_buffer(workspace, cx)?;
|
||||
let full_path = snapshot.resolve_file_path(cx, true);
|
||||
let full_path = snapshot.resolve_file_path(true, cx);
|
||||
return anyhow::Ok(vec![(full_path, snapshot, 0)]);
|
||||
}
|
||||
|
||||
@@ -201,7 +206,7 @@ fn tab_items_for_queries(
|
||||
&& visited_buffers.insert(buffer.read(cx).remote_id())
|
||||
{
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let full_path = snapshot.resolve_file_path(cx, true);
|
||||
let full_path = snapshot.resolve_file_path(true, cx);
|
||||
open_buffers.push((full_path, snapshot, *timestamp));
|
||||
}
|
||||
}
|
||||
@@ -224,10 +229,7 @@ fn tab_items_for_queries(
|
||||
let match_candidates = open_buffers
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(id, (full_path, ..))| {
|
||||
let path_string = full_path.as_deref()?.to_string_lossy().to_string();
|
||||
Some((id, path_string))
|
||||
})
|
||||
.filter_map(|(id, (full_path, ..))| Some((id, full_path.clone()?)))
|
||||
.fold(HashMap::default(), |mut candidates, (id, path_string)| {
|
||||
candidates
|
||||
.entry(path_string)
|
||||
@@ -249,8 +251,7 @@ fn tab_items_for_queries(
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(id, (full_path, ..))| {
|
||||
let path_string = full_path.as_deref()?.to_string_lossy().to_string();
|
||||
Some(fuzzy::StringMatchCandidate::new(id, &path_string))
|
||||
Some(fuzzy::StringMatchCandidate::new(id, full_path.as_ref()?))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let mut processed_matches = HashSet::default();
|
||||
@@ -302,21 +303,15 @@ fn active_item_buffer(
|
||||
}
|
||||
|
||||
fn create_tab_completion_label(
|
||||
path: &std::path::Path,
|
||||
path: &str,
|
||||
path_style: PathStyle,
|
||||
comment_id: Option<HighlightId>,
|
||||
) -> CodeLabel {
|
||||
let file_name = path
|
||||
.file_name()
|
||||
.map(|f| f.to_string_lossy())
|
||||
.unwrap_or_default();
|
||||
let parent_path = path
|
||||
.parent()
|
||||
.map(|p| p.to_string_lossy())
|
||||
.unwrap_or_default();
|
||||
let (parent_path, file_name) = path_style.split(path);
|
||||
let mut label = CodeLabel::default();
|
||||
label.push_str(&file_name, None);
|
||||
label.push_str(file_name, None);
|
||||
label.push_str(" ", None);
|
||||
label.push_str(&parent_path, comment_id);
|
||||
label.push_str(parent_path.unwrap_or_default(), comment_id);
|
||||
label.filter_range = 0..file_name.len();
|
||||
label
|
||||
}
|
||||
|
||||
@@ -5,7 +5,6 @@ use language::{Buffer, OutlineItem, ParseStatus};
|
||||
use project::Project;
|
||||
use regex::Regex;
|
||||
use std::fmt::Write;
|
||||
use std::path::Path;
|
||||
use text::Point;
|
||||
|
||||
/// For files over this size, instead of reading them (or including them in context),
|
||||
@@ -143,7 +142,7 @@ pub struct BufferContent {
|
||||
/// For smaller files, returns the full content.
|
||||
pub async fn get_buffer_content_or_outline(
|
||||
buffer: Entity<Buffer>,
|
||||
path: Option<&Path>,
|
||||
path: Option<&str>,
|
||||
cx: &AsyncApp,
|
||||
) -> Result<BufferContent> {
|
||||
let file_size = buffer.read_with(cx, |buffer, _| buffer.text().len())?;
|
||||
@@ -170,15 +169,10 @@ pub async fn get_buffer_content_or_outline(
|
||||
|
||||
let text = if let Some(path) = path {
|
||||
format!(
|
||||
"# File outline for {} (file too large to show full content)\n\n{}",
|
||||
path.display(),
|
||||
outline_text
|
||||
"# File outline for {path} (file too large to show full content)\n\n{outline_text}",
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"# File outline (file too large to show full content)\n\n{}",
|
||||
outline_text
|
||||
)
|
||||
format!("# File outline (file too large to show full content)\n\n{outline_text}",)
|
||||
};
|
||||
Ok(BufferContent {
|
||||
text,
|
||||
|
||||
@@ -96,9 +96,7 @@ impl Tool for CopyPathTool {
|
||||
.and_then(|project_path| project.entry_for_path(&project_path, cx))
|
||||
{
|
||||
Some(entity) => match project.find_project_path(&input.destination_path, cx) {
|
||||
Some(project_path) => {
|
||||
project.copy_entry(entity.id, None, project_path.path, cx)
|
||||
}
|
||||
Some(project_path) => project.copy_entry(entity.id, project_path, cx),
|
||||
None => Task::ready(Err(anyhow!(
|
||||
"Destination path {} was outside the project.",
|
||||
input.destination_path
|
||||
|
||||
@@ -8,7 +8,7 @@ use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchem
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{fmt::Write, path::Path, sync::Arc};
|
||||
use std::{fmt::Write, sync::Arc};
|
||||
use ui::IconName;
|
||||
use util::markdown::MarkdownInlineCode;
|
||||
|
||||
@@ -150,9 +150,7 @@ impl Tool for DiagnosticsTool {
|
||||
has_diagnostics = true;
|
||||
output.push_str(&format!(
|
||||
"{}: {} error(s), {} warning(s)\n",
|
||||
Path::new(worktree.read(cx).root_name())
|
||||
.join(project_path.path)
|
||||
.display(),
|
||||
worktree.read(cx).absolutize(&project_path.path).display(),
|
||||
summary.error_count,
|
||||
summary.warning_count
|
||||
));
|
||||
|
||||
@@ -26,13 +26,13 @@ use language_model::{
|
||||
use project::{AgentLocation, Project};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{cmp, iter, mem, ops::Range, path::PathBuf, pin::Pin, sync::Arc, task::Poll};
|
||||
use std::{cmp, iter, mem, ops::Range, pin::Pin, sync::Arc, task::Poll};
|
||||
use streaming_diff::{CharOperation, StreamingDiff};
|
||||
use streaming_fuzzy_matcher::StreamingFuzzyMatcher;
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct CreateFilePromptTemplate {
|
||||
path: Option<PathBuf>,
|
||||
path: Option<String>,
|
||||
edit_description: String,
|
||||
}
|
||||
|
||||
@@ -42,7 +42,7 @@ impl Template for CreateFilePromptTemplate {
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct EditFileXmlPromptTemplate {
|
||||
path: Option<PathBuf>,
|
||||
path: Option<String>,
|
||||
edit_description: String,
|
||||
}
|
||||
|
||||
@@ -52,7 +52,7 @@ impl Template for EditFileXmlPromptTemplate {
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct EditFileDiffFencedPromptTemplate {
|
||||
path: Option<PathBuf>,
|
||||
path: Option<String>,
|
||||
edit_description: String,
|
||||
}
|
||||
|
||||
@@ -115,7 +115,7 @@ impl EditAgent {
|
||||
let conversation = conversation.clone();
|
||||
let output = cx.spawn(async move |cx| {
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
|
||||
let path = cx.update(|cx| snapshot.resolve_file_path(cx, true))?;
|
||||
let path = cx.update(|cx| snapshot.resolve_file_path(true, cx))?;
|
||||
let prompt = CreateFilePromptTemplate {
|
||||
path,
|
||||
edit_description,
|
||||
@@ -229,7 +229,7 @@ impl EditAgent {
|
||||
let edit_format = self.edit_format;
|
||||
let output = cx.spawn(async move |cx| {
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
|
||||
let path = cx.update(|cx| snapshot.resolve_file_path(cx, true))?;
|
||||
let path = cx.update(|cx| snapshot.resolve_file_path(true, cx))?;
|
||||
let prompt = match edit_format {
|
||||
EditFormat::XmlTags => EditFileXmlPromptTemplate {
|
||||
path,
|
||||
|
||||
@@ -38,6 +38,7 @@ use settings::Settings;
|
||||
use std::{
|
||||
cmp::Reverse,
|
||||
collections::HashSet,
|
||||
ffi::OsStr,
|
||||
ops::Range,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
@@ -45,7 +46,7 @@ use std::{
|
||||
};
|
||||
use theme::ThemeSettings;
|
||||
use ui::{CommonAnimationExt, Disclosure, Tooltip, prelude::*};
|
||||
use util::ResultExt;
|
||||
use util::{ResultExt, rel_path::RelPath};
|
||||
use workspace::Workspace;
|
||||
|
||||
pub struct EditFileTool;
|
||||
@@ -146,11 +147,11 @@ impl Tool for EditFileTool {

         // If any path component matches the local settings folder, then this could affect
         // the editor in ways beyond the project source, so prompt.
-        let local_settings_folder = paths::local_settings_folder_relative_path();
+        let local_settings_folder = paths::local_settings_folder_name();
         let path = Path::new(&input.path);
         if path
             .components()
-            .any(|component| component.as_os_str() == local_settings_folder.as_os_str())
+            .any(|c| c.as_os_str() == <str as AsRef<OsStr>>::as_ref(local_settings_folder))
         {
             return true;
         }

@@ -195,10 +196,10 @@ impl Tool for EditFileTool {
         let mut description = input.display_description.clone();

         // Add context about why confirmation may be needed
-        let local_settings_folder = paths::local_settings_folder_relative_path();
+        let local_settings_folder = paths::local_settings_folder_name();
         if path
             .components()
-            .any(|c| c.as_os_str() == local_settings_folder.as_os_str())
+            .any(|c| c.as_os_str() == <str as AsRef<OsStr>>::as_ref(local_settings_folder))
         {
             description.push_str(" (local settings)");
         } else if let Ok(canonical_path) = std::fs::canonicalize(&input.path)
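A small self-contained sketch of the comparison pattern used in both hunks above: `Path::components()` yields components whose `as_os_str()` is an `&OsStr`, and `<str as AsRef<OsStr>>::as_ref` turns the plain string folder name (which `local_settings_folder_name()` now appears to return) into an `&OsStr` for the equality check. The `.zed` value below is illustrative only.

    use std::ffi::OsStr;
    use std::path::Path;

    // Returns true if any component of `path` equals `folder_name`.
    fn contains_folder(path: &Path, folder_name: &str) -> bool {
        path.components()
            .any(|c| c.as_os_str() == <str as AsRef<OsStr>>::as_ref(folder_name))
    }

    fn main() {
        // Example folder name; the real value comes from paths::local_settings_folder_name().
        let folder = ".zed";
        assert!(contains_folder(Path::new("myproject/.zed/settings.json"), folder));
        assert!(!contains_folder(Path::new("myproject/src/main.rs"), folder));
        println!("component comparison works as expected");
    }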
@@ -377,7 +378,7 @@ impl Tool for EditFileTool {
             .await;

         let output = EditFileToolOutput {
-            original_path: project_path.path.to_path_buf(),
+            original_path: project_path.path.as_std_path().to_owned(),
             new_text,
             old_text,
             raw_output: Some(agent_output),
@@ -549,10 +550,11 @@ fn resolve_path(
             let file_name = input
                 .path
                 .file_name()
+                .and_then(|file_name| file_name.to_str())
                 .context("Can't create file: invalid filename")?;

             let new_file_path = parent_project_path.map(|parent| ProjectPath {
-                path: Arc::from(parent.path.join(file_name)),
+                path: parent.path.join(RelPath::unix(file_name).unwrap()),
                 ..parent
             });
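The added `.and_then(|file_name| file_name.to_str())` step is needed because `Path::file_name()` returns an `&OsStr`, while the `RelPath::unix` constructor used above evidently takes a `&str`. Here is a hedged sketch of that validation chain using anyhow's `Context` impl for `Option`; the project-internal `RelPath` type itself is not reproduced.

    use anyhow::Context as _;
    use std::path::Path;

    // Extracts a UTF-8 file name from `input`, failing with a readable error
    // if the path has no file name or the name is not valid UTF-8.
    fn utf8_file_name(input: &Path) -> anyhow::Result<&str> {
        input
            .file_name()
            .and_then(|file_name| file_name.to_str())
            .context("Can't create file: invalid filename")
    }

    fn main() -> anyhow::Result<()> {
        assert_eq!(utf8_file_name(Path::new("dir/new_file.rs"))?, "new_file.rs");
        assert!(utf8_file_name(Path::new("/")).is_err()); // no file name component
        Ok(())
    }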
@@ -1236,7 +1238,7 @@ mod tests {
     use serde_json::json;
     use settings::SettingsStore;
     use std::fs;
-    use util::path;
+    use util::{path, rel_path::rel_path};

     #[gpui::test]
     async fn test_edit_nonexistent_file(cx: &mut TestAppContext) {
@@ -1355,14 +1357,10 @@ mod tests {
         cx.update(|cx| resolve_path(&input, project, cx))
     }

     #[track_caller]
     fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &str) {
-        let actual = path
-            .expect("Should return valid path")
-            .path
-            .to_str()
-            .unwrap()
-            .replace("\\", "/"); // Naive Windows paths normalization
-        assert_eq!(actual, expected);
+        let actual = path.expect("Should return valid path").path;
+        assert_eq!(actual.as_ref(), rel_path(expected));
     }

     #[test]
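The rewritten helper keeps `#[track_caller]`, which makes a failing assertion inside the helper get reported at the test that called it rather than at the helper's own line. A minimal sketch of that effect, independent of the `ProjectPath` and `rel_path` types used above:

    #[track_caller]
    fn assert_resolved_eq(actual: &str, expected: &str) {
        // With #[track_caller], a failure here is attributed to the call site
        // in the test, not to this line inside the helper.
        assert_eq!(actual, expected);
    }

    fn main() {
        assert_resolved_eq("dir/new_file.rs", "dir/new_file.rs");
        println!("ok");
    }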
@@ -1976,25 +1974,22 @@ mod tests {
         let project = Project::test(fs.clone(), [path!("/home/user/myproject").as_ref()], cx).await;

         // Get the actual local settings folder name
-        let local_settings_folder = paths::local_settings_folder_relative_path();
+        let local_settings_folder = paths::local_settings_folder_name();

         // Test various config path patterns
         let test_cases = vec![
             (
-                format!("{}/settings.json", local_settings_folder.display()),
+                format!("{local_settings_folder}/settings.json"),
                 true,
                 "Top-level local settings file".to_string(),
             ),
             (
-                format!(
-                    "myproject/{}/settings.json",
-                    local_settings_folder.display()
-                ),
+                format!("myproject/{local_settings_folder}/settings.json"),
                 true,
                 "Local settings in project path".to_string(),
             ),
             (
-                format!("src/{}/config.toml", local_settings_folder.display()),
+                format!("src/{local_settings_folder}/config.toml"),
                 true,
                 "Local settings in subdirectory".to_string(),
             ),
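The `format!` changes above work because the folder name is now a plain string, which implements `Display` and can be captured inline by the format string, whereas a `Path` has no `Display` impl and needed the explicit `.display()` adapter. A small sketch; the `.zed` value is illustrative only.

    use std::path::Path;

    fn main() {
        // New style: a &str folder name is captured inline by the format string.
        let local_settings_folder = ".zed";
        let new_style = format!("{local_settings_folder}/settings.json");

        // Old style: a Path is not Display, so it went through .display().
        let folder_as_path = Path::new(".zed");
        let old_style = format!("{}/settings.json", folder_as_path.display());

        assert_eq!(new_style, old_style);
        println!("{new_style}");
    }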
@@ -2205,12 +2200,7 @@ mod tests {
             ("", false, "Empty path is treated as project root"),
             // Root directory
             ("/", true, "Root directory should be outside project"),
-            // Parent directory references - find_project_path resolves these
-            (
-                "project/../other",
-                false,
-                "Path with .. is resolved by find_project_path",
-            ),
+            ("project/../other", true, "Path with .. is outside project"),
             (
                 "project/./src/file.rs",
                 false,
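For the `project/../other` case above: std's `Path` keeps `..` as a literal `Component::ParentDir` and never resolves it on its own, so a project-path resolver has to decide explicitly whether such a path escapes the project. A tiny std-only sketch; the containment rule itself belongs to the tool and is not reproduced here.

    use std::path::{Component, Path};

    fn main() {
        let path = Path::new("project/../other");

        // Components are reported literally; ".." is not collapsed by Path itself.
        let has_parent_ref = path.components().any(|c| c == Component::ParentDir);
        assert!(has_parent_ref);

        println!("{path:?} contains a `..` component: {has_parent_ref}");
    }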
Some files were not shown because too many files have changed in this diff.