Compare commits

ai-worlds- ... github-tok

77 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | a6a5f55a05 |  |
|  | dda614091a |  |
|  | fa9da6ad5b |  |
|  | d082cfdbec |  |
|  | c71791d64e |  |
|  | 244d8517f1 |  |
|  | 3884de937b |  |
|  | 8af984ae70 |  |
|  | 9d533f9d30 |  |
|  | 274a40b7e0 |  |
|  | 9c7b1d19ce |  |
|  | 3d9881121f |  |
|  | a2e98e9f0e |  |
|  | 7c64737e00 |  |
|  | 8191a5339d |  |
|  | 17c3b741ec |  |
|  | 52770cd3ad |  |
|  | 4ac67ac5ae |  |
|  | 8c1b549683 |  |
|  | ff6ac60bad |  |
|  | f8ab51307a |  |
|  | 0a2186c87b |  |
|  | c3653f4cb1 |  |
|  | 8b28941c14 |  |
|  | aefb798090 |  |
|  | 2c5aa5891d |  |
|  | 7d54d9f45e |  |
|  | cde47e60cd |  |
|  | 79f96a5afe |  |
|  | 81058ee172 |  |
|  | 89743117c6 |  |
|  | 6de37fa57c |  |
|  | beb0d49dc4 |  |
|  | c9aadadc4b |  |
|  | bcd182f480 |  |
|  | 3987b60738 |  |
|  | 827103908e |  |
|  | 8e9e3ba1a5 |  |
|  | 676ed8fb8a |  |
|  | 4304521655 |  |
|  | 04716a0e4a |  |
|  | 5e38915d45 |  |
|  | f9257b0efe |  |
|  | 5d0c96872b |  |
|  | 071e684be4 |  |
|  | 2280594408 |  |
|  | 09a1d51e9a |  |
|  | ac15194d11 |  |
|  | 988d834c33 |  |
|  | 48eacf3f2a |  |
|  | 030d4d2631 |  |
|  | 10df7b5eb9 |  |
|  | 55120c4231 |  |
|  | 8227c45a11 |  |
|  | d23359e19a |  |
|  | 936ad0bf10 |  |
|  | faa0bb51c9 |  |
|  | 2db2271e3c |  |
|  | 79b1dd7db8 |  |
|  | 81f8e2ed4a |  |
|  | b9256dd469 |  |
|  | 27d3da678c |  |
|  | 03357f3f7b |  |
|  | 4aabba6cf6 |  |
|  | 8c46a4f594 |  |
|  | 522abe8e59 |  |
|  | 5ae8c4cf09 |  |
|  | d8195a8fd7 |  |
|  | 2645591cd5 |  |
|  | 526a7c0702 |  |
|  | e793740168 |  |
|  | dea0a58727 |  |
|  | b7abc9d493 |  |
|  | 01a77bb231 |  |
|  | de225fd242 |  |
|  | 1bc052d76b |  |
|  | 29cb95a3ca |  |
.github/actions/build_docs/action.yml (vendored, new file, 26 lines)

@@ -0,0 +1,26 @@
+name: "Build docs"
+description: "Build the docs"
+
+runs:
+  using: "composite"
+  steps:
+    - name: Setup mdBook
+      uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2
+      with:
+        mdbook-version: "0.4.37"
+
+    - name: Cache dependencies
+      uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
+      with:
+        save-if: ${{ github.ref == 'refs/heads/main' }}
+        cache-provider: "buildjet"
+
+    - name: Install Linux dependencies
+      shell: bash -euxo pipefail {0}
+      run: ./script/linux
+
+    - name: Build book
+      shell: bash -euxo pipefail {0}
+      run: |
+        mkdir -p target/deploy
+        mdbook build ./docs --dest-dir=../target/deploy/docs/
.github/workflows/ci.yml (vendored, 21 lines changed)

@@ -191,6 +191,27 @@ jobs:
         with:
           config: ./typos.toml

+  check_docs:
+    timeout-minutes: 60
+    name: Check docs
+    needs: [job_spec]
+    if: github.repository_owner == 'zed-industries'
+    runs-on:
+      - buildjet-8vcpu-ubuntu-2204
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          clean: false
+
+      - name: Configure CI
+        run: |
+          mkdir -p ./../.cargo
+          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+
+      - name: Build docs
+        uses: ./.github/actions/build_docs
+
   macos_tests:
     timeout-minutes: 60
     name: (macOS) Run Clippy and tests
.github/workflows/deploy_cloudflare.yml (vendored, 19 lines changed)

@@ -9,7 +9,7 @@ jobs:
   deploy-docs:
     name: Deploy Docs
     if: github.repository_owner == 'zed-industries'
-    runs-on: ubuntu-latest
+    runs-on: buildjet-16vcpu-ubuntu-2204

     steps:
       - name: Checkout repo
@@ -17,24 +17,11 @@ jobs:
         with:
           clean: false

-      - name: Setup mdBook
-        uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2
-        with:
-          mdbook-version: "0.4.37"
-
-      - name: Set up default .cargo/config.toml
-        run: cp ./.cargo/collab-config.toml ./.cargo/config.toml
-
-      - name: Install system dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libxkbcommon-dev libxkbcommon-x11-dev
-
-      - name: Build book
-        run: |
-          set -euo pipefail
-          mkdir -p target/deploy
-          mdbook build ./docs --dest-dir=../target/deploy/docs/
+      - name: Build docs
+        uses: ./.github/actions/build_docs

       - name: Deploy Docs
         uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
.github/workflows/unit_evals.yml (vendored, new file, 85 lines)

@@ -0,0 +1,85 @@
+name: Run Unit Evals
+
+on:
+  schedule:
+    # GitHub might drop jobs at busy times, so we choose a random time in the middle of the night.
+    - cron: "47 1 * * *"
+  workflow_dispatch:
+
+concurrency:
+  # Allow only one workflow per any non-`main` branch.
+  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
+  cancel-in-progress: true
+
+env:
+  CARGO_TERM_COLOR: always
+  CARGO_INCREMENTAL: 0
+  RUST_BACKTRACE: 1
+  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+
+jobs:
+  unit_evals:
+    timeout-minutes: 60
+    name: Run unit evals
+    runs-on:
+      - buildjet-16vcpu-ubuntu-2204
+    steps:
+      - name: Add Rust to the PATH
+        run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
+
+      - name: Checkout repo
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          clean: false
+
+      - name: Cache dependencies
+        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
+        with:
+          save-if: ${{ github.ref == 'refs/heads/main' }}
+          cache-provider: "buildjet"
+
+      - name: Install Linux dependencies
+        run: ./script/linux
+
+      - name: Configure CI
+        run: |
+          mkdir -p ./../.cargo
+          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+
+      - name: Install Rust
+        shell: bash -euxo pipefail {0}
+        run: |
+          cargo install cargo-nextest --locked
+
+      - name: Install Node
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
+        with:
+          node-version: "18"
+
+      - name: Limit target directory size
+        shell: bash -euxo pipefail {0}
+        run: script/clear-target-dir-if-larger-than 100
+
+      - name: Run unit evals
+        shell: bash -euxo pipefail {0}
+        run: cargo nextest run --workspace --no-fail-fast --features eval --no-capture -E 'test(::eval_)' --test-threads 1
+        env:
+          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+
+      - name: Send the pull request link into the Slack channel
+        if: ${{ failure() }}
+        uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
+        with:
+          method: chat.postMessage
+          token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
+          payload: |
+            channel: C04UDRNNJFQ
+            text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
+
+      # Since the Linux runner is not stateful, in theory there is no need to do this cleanup.
+      # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
+      # to clean up the config file, I’ve included the cleanup code here as a precaution.
+      # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution.
+      - name: Clean CI config file
+        if: always()
+        run: rm -rf ./../.cargo
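The `-E 'test(::eval_)'` filter passed to cargo-nextest above selects tests whose fully qualified path contains `::eval_`, so unit evals are ordinary Rust tests that follow a naming convention and are gated behind the `eval` feature. A minimal sketch of a test this filter would pick up (the module and function names here are hypothetical, not from the Zed codebase):

```rust
// Hypothetical unit eval. It only compiles with `--features eval`, and
// `cargo nextest run -E 'test(::eval_)'` matches it because its full test
// path (`<crate>::evals::eval_tool_selection`) contains `::eval_`.
#[cfg(all(test, feature = "eval"))]
mod evals {
    // Stand-in for a real call to a language model; the workflow above
    // provides ANTHROPIC_API_KEY so that real evals can issue requests.
    fn grade_model_response(_prompt: &str) -> f32 {
        1.0
    }

    #[test]
    fn eval_tool_selection() {
        let score = grade_model_response("pick the right tool to edit a file");
        assert!(score >= 0.8, "eval score too low: {score}");
    }
}
```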
.rules (6 lines changed)

@@ -5,6 +5,12 @@
 * Prefer implementing functionality in existing files unless it is a new logical component. Avoid creating many small files.
 * Avoid using functions that panic like `unwrap()`, instead use mechanisms like `?` to propagate errors.
 * Be careful with operations like indexing which may panic if the indexes are out of bounds.
+* Never silently discard errors with `let _ =` on fallible operations. Always handle errors appropriately:
+  - Propagate errors with `?` when the calling function should handle them
+  - Use `.log_err()` or similar when you need to ignore errors but want visibility
+  - Use explicit error handling with `match` or `if let Err(...)` when you need custom logic
+  - Example: avoid `let _ = client.request(...).await?;` - use `client.request(...).await?;` instead
+* When implementing async operations that may fail, ensure errors propagate to the UI layer so users get meaningful feedback.
 * Never create files with `mod.rs` paths - prefer `src/some_module.rs` instead of `src/some_module/mod.rs`.

 # GPUI
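A minimal sketch of the three error-handling options the new rule names, using a hypothetical fallible `fetch_settings` helper (in Zed itself `.log_err()` comes from the `util` crate; plain `log::warn!` stands in for it here):

```rust
use anyhow::Result;

// Hypothetical fallible operation standing in for `client.request(...)`.
fn fetch_settings() -> Result<String> {
    Ok("{}".to_string())
}

// 1. Propagate with `?` when the caller should decide what to do.
fn load_settings() -> Result<String> {
    let raw = fetch_settings()?;
    Ok(raw)
}

// 2. Ignore the value but keep visibility, instead of `let _ = ...`.
fn warm_settings_cache() {
    if let Err(err) = fetch_settings() {
        log::warn!("failed to warm settings cache: {err:?}");
    }
}

// 3. Explicit handling when custom fallback logic is needed.
fn settings_or_default() -> String {
    match fetch_settings() {
        Ok(raw) => raw,
        Err(_) => "{}".to_string(),
    }
}
```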
Cargo.lock (generated, 16 lines changed)

@@ -631,6 +631,7 @@ name = "assistant_tool"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "async-watch",
  "buffer_diff",
  "clock",
  "collections",
@@ -4542,6 +4543,8 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
+ "clap",
+ "command_palette",
  "gpui",
  "mdbook",
  "regex",
  "serde",
@@ -4549,6 +4552,7 @@ dependencies = [
  "settings",
  "util",
  "workspace-hack",
+ "zed",
 ]

 [[package]]
@@ -12113,6 +12117,7 @@ dependencies = [
  "unindent",
  "url",
  "util",
  "uuid",
+ "which 6.0.3",
  "workspace-hack",
  "worktree",
@@ -16508,9 +16513,9 @@ dependencies = [

 [[package]]
 name = "tree-sitter"
-version = "0.25.5"
+version = "0.25.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac5fff5c47490dfdf473b5228039bfacad9d765d9b6939d26bf7cc064c1c7822"
+checksum = "a7cf18d43cbf0bfca51f657132cc616a5097edc4424d538bae6fa60142eaf9f0"
 dependencies = [
  "cc",
  "regex",
@@ -16523,9 +16528,9 @@ dependencies = [

 [[package]]
 name = "tree-sitter-bash"
-version = "0.23.3"
+version = "0.25.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "329a4d48623ac337d42b1df84e81a1c9dbb2946907c102ca72db158c1964a52e"
+checksum = "871b0606e667e98a1237ebdc1b0d7056e0aebfdc3141d12b399865d4cb6ed8a6"
 dependencies = [
  "cc",
  "tree-sitter-language",
@@ -17129,6 +17134,7 @@ dependencies = [
  "futures-lite 1.13.0",
  "git2",
  "globset",
+ "indoc",
  "itertools 0.14.0",
  "libc",
  "log",
@@ -19706,7 +19712,7 @@ dependencies = [

 [[package]]
 name = "zed"
-version = "0.190.0"
+version = "0.191.0"
 dependencies = [
  "activity_indicator",
  "agent",

Cargo.toml:

@@ -574,8 +574,8 @@ tokio = { version = "1" }
 tokio-tungstenite = { version = "0.26", features = ["__rustls-tls"] }
 toml = "0.8"
 tower-http = "0.4.4"
-tree-sitter = { version = "0.25.5", features = ["wasm"] }
-tree-sitter-bash = "0.23"
+tree-sitter = { version = "0.25.6", features = ["wasm"] }
+tree-sitter-bash = "0.25.0"
 tree-sitter-c = "0.23"
 tree-sitter-cpp = "0.23"
 tree-sitter-css = "0.23"
assets/icons/list_todo.svg (new file, 1 line; after: 373 B)

@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-list-todo-icon lucide-list-todo"><rect x="3" y="5" width="6" height="6" rx="1"/><path d="m3 17 2 2 4-4"/><path d="M13 6h8"/><path d="M13 12h8"/><path d="M13 18h8"/></svg>
Modified icon (before: 1.7 KiB, after: 1.7 KiB):

@@ -1 +1,3 @@
-<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M6.97942 1.25171L6.9585 1.30199L5.58662 4.60039C5.54342 4.70426 5.44573 4.77523 5.3336 4.78422L1.7727 5.0697L1.71841 5.07405L1.38687 5.10063L1.08608 5.12475C0.820085 5.14607 0.712228 5.47802 0.914889 5.65162L1.14406 5.84793L1.39666 6.06431L1.43802 6.09974L4.15105 8.42374C4.23648 8.49692 4.2738 8.61176 4.24769 8.72118L3.41882 12.196L3.40618 12.249L3.32901 12.5725L3.25899 12.866C3.19708 13.1256 3.47945 13.3308 3.70718 13.1917L3.9647 13.0344L4.24854 12.861L4.29502 12.8326L7.34365 10.9705C7.43965 10.9119 7.5604 10.9119 7.6564 10.9705L10.705 12.8326L10.7515 12.861L11.0354 13.0344L11.2929 13.1917C11.5206 13.3308 11.803 13.1256 11.7411 12.866L11.671 12.5725L11.5939 12.249L11.5812 12.196L10.7524 8.72118C10.7263 8.61176 10.7636 8.49692 10.849 8.42374L13.562 6.09974L13.6034 6.06431L13.856 5.84793L14.0852 5.65162C14.2878 5.47802 14.18 5.14607 13.914 5.12475L13.6132 5.10063L13.2816 5.07405L13.2274 5.0697L9.66645 4.78422C9.55432 4.77523 9.45663 4.70426 9.41343 4.60039L8.04155 1.30199L8.02064 1.25171L7.89291 0.944609L7.77702 0.665992C7.67454 0.419604 7.32551 0.419604 7.22303 0.665992L7.10715 0.944609L6.97942 1.25171ZM7.50003 2.60397L6.50994 4.98442C6.32273 5.43453 5.89944 5.74207 5.41351 5.78103L2.84361 5.98705L4.8016 7.66428C5.17183 7.98142 5.33351 8.47903 5.2204 8.95321L4.62221 11.461L6.8224 10.1171C7.23842 9.86302 7.76164 9.86302 8.17766 10.1171L10.3778 11.461L9.77965 8.95321C9.66654 8.47903 9.82822 7.98142 10.1984 7.66428L12.1564 5.98705L9.58654 5.78103C9.10061 5.74207 8.67732 5.43453 8.49011 4.98442L7.50003 2.60397Z" fill="currentColor" fill-rule="evenodd" clip-rule="evenodd"></path></svg>
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M7.68323 1.53C7.71245 1.47097 7.75758 1.42129 7.81353 1.38655C7.86949 1.35181 7.93404 1.3334 7.9999 1.3334C8.06576 1.3334 8.13031 1.35181 8.18626 1.38655C8.24222 1.42129 8.28735 1.47097 8.31656 1.53L9.85656 4.64933C9.95802 4.85465 10.1078 5.03227 10.293 5.16697C10.4782 5.30167 10.6933 5.38941 10.9199 5.42267L14.3639 5.92667C14.4292 5.93612 14.4905 5.96365 14.5409 6.00613C14.5913 6.04862 14.6289 6.10437 14.6492 6.16707C14.6696 6.22978 14.6721 6.29694 14.6563 6.36096C14.6405 6.42498 14.6071 6.4833 14.5599 6.52933L12.0692 8.95467C11.905 9.11473 11.7821 9.31232 11.7111 9.53042C11.6402 9.74852 11.6233 9.98059 11.6619 10.2067L12.2499 13.6333C12.2614 13.6986 12.2544 13.7657 12.2296 13.8271C12.2048 13.8885 12.1632 13.9417 12.1096 13.9807C12.056 14.0196 11.9926 14.0427 11.9265 14.0473C11.8604 14.0519 11.7944 14.0378 11.7359 14.0067L8.65723 12.388C8.45438 12.2815 8.22868 12.2258 7.99956 12.2258C7.77044 12.2258 7.54475 12.2815 7.3419 12.388L4.2639 14.0067C4.20545 14.0376 4.1395 14.0515 4.07353 14.0468C4.00757 14.0421 3.94424 14.019 3.89076 13.9801C3.83728 13.9413 3.79579 13.8881 3.771 13.8268C3.74622 13.7655 3.73914 13.6985 3.75056 13.6333L4.3379 10.2073C4.3767 9.98116 4.35989 9.74893 4.28892 9.5307C4.21796 9.31246 4.09497 9.11477 3.93056 8.95467L1.4399 6.53C1.39229 6.48402 1.35856 6.4256 1.34254 6.36138C1.32652 6.29717 1.32886 6.22975 1.34928 6.16679C1.36971 6.10384 1.40741 6.04789 1.45808 6.00532C1.50876 5.96275 1.57037 5.93527 1.6359 5.926L5.07923 5.42267C5.30607 5.38967 5.52149 5.30204 5.70695 5.16733C5.89242 5.03261 6.04237 4.85485 6.1439 4.64933L7.68323 1.53Z" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
Modified icon (before: 794 B, after: 1.7 KiB):

@@ -1 +1,3 @@
-<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M7.22303 0.665992C7.32551 0.419604 7.67454 0.419604 7.77702 0.665992L9.41343 4.60039C9.45663 4.70426 9.55432 4.77523 9.66645 4.78422L13.914 5.12475C14.18 5.14607 14.2878 5.47802 14.0852 5.65162L10.849 8.42374C10.7636 8.49692 10.7263 8.61176 10.7524 8.72118L11.7411 12.866C11.803 13.1256 11.5206 13.3308 11.2929 13.1917L7.6564 10.9705C7.5604 10.9119 7.43965 10.9119 7.34365 10.9705L3.70718 13.1917C3.47945 13.3308 3.19708 13.1256 3.25899 12.866L4.24769 8.72118C4.2738 8.61176 4.23648 8.49692 4.15105 8.42374L0.914889 5.65162C0.712228 5.47802 0.820086 5.14607 1.08608 5.12475L5.3336 4.78422C5.44573 4.77523 5.54342 4.70426 5.58662 4.60039L7.22303 0.665992Z" fill="currentColor"></path></svg>
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M7.68323 1.53C7.71245 1.47097 7.75758 1.42129 7.81353 1.38655C7.86949 1.35181 7.93404 1.3334 7.9999 1.3334C8.06576 1.3334 8.13031 1.35181 8.18626 1.38655C8.24222 1.42129 8.28735 1.47097 8.31656 1.53L9.85656 4.64933C9.95802 4.85465 10.1078 5.03227 10.293 5.16697C10.4782 5.30167 10.6933 5.38941 10.9199 5.42267L14.3639 5.92667C14.4292 5.93612 14.4905 5.96365 14.5409 6.00613C14.5913 6.04862 14.6289 6.10437 14.6492 6.16707C14.6696 6.22978 14.6721 6.29694 14.6563 6.36096C14.6405 6.42498 14.6071 6.4833 14.5599 6.52933L12.0692 8.95467C11.905 9.11473 11.7821 9.31232 11.7111 9.53042C11.6402 9.74852 11.6233 9.98059 11.6619 10.2067L12.2499 13.6333C12.2614 13.6986 12.2544 13.7657 12.2296 13.8271C12.2048 13.8885 12.1632 13.9417 12.1096 13.9807C12.056 14.0196 11.9926 14.0427 11.9265 14.0473C11.8604 14.0519 11.7944 14.0378 11.7359 14.0067L8.65723 12.388C8.45438 12.2815 8.22868 12.2258 7.99956 12.2258C7.77044 12.2258 7.54475 12.2815 7.3419 12.388L4.2639 14.0067C4.20545 14.0376 4.1395 14.0515 4.07353 14.0468C4.00757 14.0421 3.94424 14.019 3.89076 13.9801C3.83728 13.9413 3.79579 13.8881 3.771 13.8268C3.74622 13.7655 3.73914 13.6985 3.75056 13.6333L4.3379 10.2073C4.3767 9.98116 4.35989 9.74893 4.28892 9.5307C4.21796 9.31246 4.09497 9.11477 3.93056 8.95467L1.4399 6.53C1.39229 6.48402 1.35856 6.4256 1.34254 6.36138C1.32652 6.29717 1.32886 6.22975 1.34928 6.16679C1.36971 6.10384 1.40741 6.04789 1.45808 6.00532C1.50876 5.96275 1.57037 5.93527 1.6359 5.926L5.07923 5.42267C5.30607 5.38967 5.52149 5.30204 5.70695 5.16733C5.89242 5.03261 6.04237 4.85485 6.1439 4.64933L7.68323 1.53Z" fill="black" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
Linux default keymap:

@@ -120,7 +120,7 @@
     "ctrl-'": "editor::ToggleSelectedDiffHunks",
     "ctrl-\"": "editor::ExpandAllDiffHunks",
     "ctrl-i": "editor::ShowSignatureHelp",
-    "alt-g b": "editor::ToggleGitBlame",
+    "alt-g b": "git::Blame",
     "menu": "editor::OpenContextMenu",
     "shift-f10": "editor::OpenContextMenu",
     "ctrl-shift-e": "editor::ToggleEditPrediction",
@@ -278,7 +278,9 @@
     "enter": "agent::Chat",
     "ctrl-enter": "agent::ChatWithFollow",
     "ctrl-i": "agent::ToggleProfileSelector",
-    "shift-ctrl-r": "agent::OpenAgentDiff"
+    "shift-ctrl-r": "agent::OpenAgentDiff",
+    "ctrl-shift-y": "agent::KeepAll",
+    "ctrl-shift-n": "agent::RejectAll"
   }
 },
 {
@@ -510,14 +512,14 @@
 {
   "context": "Workspace",
   "bindings": {
-    "alt-open": "projects::OpenRecent",
-    "alt-ctrl-o": "projects::OpenRecent",
-    "alt-shift-open": "projects::OpenRemote",
-    "alt-ctrl-shift-o": "projects::OpenRemote",
+    "alt-open": ["projects::OpenRecent", { "create_new_window": false }],
+    // Change the default action on `menu::Confirm` by setting the parameter
+    // "alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": true }],
+    "alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": false }],
+    "alt-shift-open": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
+    // Change to open path modal for existing remote connection by setting the parameter
+    // "alt-ctrl-shift-o": ["projects::OpenRemote", { "from_existing_connection": true }],
+    "alt-ctrl-shift-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
     "alt-ctrl-shift-b": "branches::OpenRecent",
     "alt-shift-enter": "toast::RunAction",
     "ctrl-~": "workspace::NewTerminal",
@@ -909,7 +911,9 @@
   "context": "CollabPanel && not_editing",
   "bindings": {
     "ctrl-backspace": "collab_panel::Remove",
-    "space": "menu::Confirm"
+    "space": "menu::Confirm",
+    "ctrl-up": "collab_panel::MoveChannelUp",
+    "ctrl-down": "collab_panel::MoveChannelDown"
   }
 },
 {
macOS default keymap:

@@ -138,7 +138,7 @@
     "cmd-;": "editor::ToggleLineNumbers",
     "cmd-'": "editor::ToggleSelectedDiffHunks",
     "cmd-\"": "editor::ExpandAllDiffHunks",
-    "cmd-alt-g b": "editor::ToggleGitBlame",
+    "cmd-alt-g b": "git::Blame",
     "cmd-i": "editor::ShowSignatureHelp",
     "f9": "editor::ToggleBreakpoint",
     "shift-f9": "editor::EditLogBreakpoint",
@@ -315,7 +315,9 @@
     "enter": "agent::Chat",
     "cmd-enter": "agent::ChatWithFollow",
     "cmd-i": "agent::ToggleProfileSelector",
-    "shift-ctrl-r": "agent::OpenAgentDiff"
+    "shift-ctrl-r": "agent::OpenAgentDiff",
+    "cmd-shift-y": "agent::KeepAll",
+    "cmd-shift-n": "agent::RejectAll"
   }
 },
 {
@@ -582,9 +584,9 @@
   "bindings": {
     // Change the default action on `menu::Confirm` by setting the parameter
     // "alt-cmd-o": ["projects::OpenRecent", {"create_new_window": true }],
-    "alt-cmd-o": "projects::OpenRecent",
-    "ctrl-cmd-o": "projects::OpenRemote",
-    "ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true }],
+    "alt-cmd-o": ["projects::OpenRecent", { "create_new_window": false }],
+    "ctrl-cmd-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
+    "ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true, "create_new_window": false }],
     "alt-cmd-b": "branches::OpenRecent",
     "ctrl-~": "workspace::NewTerminal",
     "cmd-s": "workspace::Save",
@@ -965,7 +967,9 @@
     "use_key_equivalents": true,
     "bindings": {
       "ctrl-backspace": "collab_panel::Remove",
-      "space": "menu::Confirm"
+      "space": "menu::Confirm",
+      "cmd-up": "collab_panel::MoveChannelUp",
+      "cmd-down": "collab_panel::MoveChannelDown"
     }
   },
   {
Vim-mode keymap:

@@ -198,6 +198,8 @@
     "9": ["vim::Number", 9],
     "ctrl-w d": "editor::GoToDefinitionSplit",
     "ctrl-w g d": "editor::GoToDefinitionSplit",
+    "ctrl-w ]": "editor::GoToDefinitionSplit",
+    "ctrl-w ctrl-]": "editor::GoToDefinitionSplit",
     "ctrl-w shift-d": "editor::GoToTypeDefinitionSplit",
     "ctrl-w g shift-d": "editor::GoToTypeDefinitionSplit",
     "ctrl-w space": "editor::OpenExcerptsSplit",
Agent system prompt template:

@@ -17,13 +17,13 @@ You are a highly skilled software engineer with extensive knowledge in many programming languages
 4. Use only the tools that are currently available.
 5. DO NOT use a tool that is not available just because it appears in the conversation. This means the user turned it off.
 6. NEVER run commands that don't terminate on their own such as web servers (like `npm run start`, `npm run dev`, `python -m http.server`, etc) or file watchers.
 7. Avoid HTML entity escaping - use plain characters instead.

 ## Searching and Reading

 If you are unsure how to fulfill the user's request, gather more information with tool calls and/or clarifying questions.

 {{! TODO: If there are files, we should mention it but otherwise omit that fact }}
 {{#if has_tools}}
 If appropriate, use tool calls to explore the current project, which contains the following root directories:

 {{#each worktrees}}
@@ -38,7 +38,6 @@ If appropriate, use tool calls to explore the current project, which contains the following root directories:
 - As you learn about the structure of the project, use that information to scope `grep` searches to targeted subtrees of the project.
 - The user might specify a partial file path. If you don't know the full path, use `find_path` (not `grep`) before you read the file.
 {{/if}}
 {{/if}}
 {{else}}
 You are being tasked with providing a response, but you have no ability to use tools or to read or write any aspect of the user's system (other than any context the user might have provided to you).
Default settings:

@@ -73,9 +73,6 @@
   "unnecessary_code_fade": 0.3,
   // Active pane styling settings.
   "active_pane_modifiers": {
-    // The factor to grow the active pane by. Defaults to 1.0
-    // which gives the same size as all other panes.
-    "magnification": 1.0,
     // Inset border size of the active pane, in pixels.
     "border_size": 0.0,
     // Opacity of the inactive panes. 0 means transparent, 1 means opaque.
@@ -536,6 +533,9 @@
       "function": false
     }
   },
+  // Whether to resize all the panels in a dock when resizing the dock.
+  // Can be a combination of "left", "right" and "bottom".
+  "resize_all_panels_in_dock": ["left"],
   "project_panel": {
     // Whether to show the project panel button in the status bar
     "button": true,
@@ -1500,11 +1500,11 @@
     }
   },
   "LaTeX": {
     "format_on_save": "on",
     "formatter": "language_server",
     "language_servers": ["texlab", "..."],
     "prettier": {
-      "allowed": false
+      "allowed": true,
+      "plugins": ["prettier-plugin-latex"]
     }
   },
   "Markdown": {
@@ -1528,7 +1528,7 @@
     "allow_rewrap": "anywhere"
   },
   "Ruby": {
-    "language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "..."]
+    "language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "..."]
   },
   "SCSS": {
     "prettier": {
active_thread:

@@ -1,9 +1,8 @@
-use crate::AgentPanel;
 use crate::context::{AgentContextHandle, RULES_ICON};
 use crate::context_picker::{ContextPicker, MentionLink};
 use crate::context_store::ContextStore;
 use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
-use crate::message_editor::insert_message_creases;
+use crate::message_editor::{extract_message_creases, insert_message_creases};
 use crate::thread::{
     LastRestoreCheckpoint, MessageCrease, MessageId, MessageSegment, Thread, ThreadError,
     ThreadEvent, ThreadFeedback, ThreadSummary,
@@ -13,6 +12,7 @@ use crate::tool_use::{PendingToolUseStatus, ToolUse};
 use crate::ui::{
     AddedContext, AgentNotification, AgentNotificationEvent, AnimatedLabel, ContextPill,
 };
+use crate::{AgentPanel, ModelUsageContext};
 use agent_settings::{AgentSettings, NotifyWhenAgentWaiting};
 use anyhow::Context as _;
 use assistant_tool::ToolUseStatus;
@@ -1348,6 +1348,7 @@ impl ActiveThread {
             Some(self.text_thread_store.downgrade()),
             context_picker_menu_handle.clone(),
             SuggestContextKind::File,
+            ModelUsageContext::Thread(self.thread.clone()),
             window,
             cx,
         )
@@ -1517,31 +1518,7 @@ impl ActiveThread {
     }

     fn paste(&mut self, _: &Paste, _window: &mut Window, cx: &mut Context<Self>) {
-        let images = cx
-            .read_from_clipboard()
-            .map(|item| {
-                item.into_entries()
-                    .filter_map(|entry| {
-                        if let ClipboardEntry::Image(image) = entry {
-                            Some(image)
-                        } else {
-                            None
-                        }
-                    })
-                    .collect::<Vec<_>>()
-            })
-            .unwrap_or_default();
-
-        if images.is_empty() {
-            return;
-        }
-        cx.stop_propagation();
-
-        self.context_store.update(cx, |store, cx| {
-            for image in images {
-                store.add_image_instance(Arc::new(image), cx);
-            }
-        });
+        attach_pasted_images_as_context(&self.context_store, cx);
     }

     fn cancel_editing_message(
@@ -1586,6 +1563,8 @@ impl ActiveThread {

         let edited_text = state.editor.read(cx).text(cx);

+        let creases = state.editor.update(cx, extract_message_creases);
+
         let new_context = self
             .context_store
             .read(cx)
@@ -1610,6 +1589,7 @@ impl ActiveThread {
             message_id,
             Role::User,
             vec![MessageSegment::Text(edited_text)],
+            creases,
             Some(context.loaded_context),
             checkpoint.ok(),
             cx,
@@ -1823,9 +1803,10 @@ impl ActiveThread {

         // Get all the data we need from thread before we start using it in closures
         let checkpoint = thread.checkpoint_for_message(message_id);
+        let configured_model = thread.configured_model().map(|m| m.model);
         let added_context = thread
             .context_for_message(message_id)
-            .map(|context| AddedContext::new_attached(context, cx))
+            .map(|context| AddedContext::new_attached(context, configured_model.as_ref(), cx))
             .collect::<Vec<_>>();

         let tool_uses = thread.tool_uses_for_message(message_id, cx);
@@ -3648,6 +3629,38 @@ pub(crate) fn open_context(
     }
 }

+pub(crate) fn attach_pasted_images_as_context(
+    context_store: &Entity<ContextStore>,
+    cx: &mut App,
+) -> bool {
+    let images = cx
+        .read_from_clipboard()
+        .map(|item| {
+            item.into_entries()
+                .filter_map(|entry| {
+                    if let ClipboardEntry::Image(image) = entry {
+                        Some(image)
+                    } else {
+                        None
+                    }
+                })
+                .collect::<Vec<_>>()
+        })
+        .unwrap_or_default();
+
+    if images.is_empty() {
+        return false;
+    }
+    cx.stop_propagation();
+
+    context_store.update(cx, |store, cx| {
+        for image in images {
+            store.add_image_instance(Arc::new(image), cx);
+        }
+    });
+    true
+}
+
 fn open_editor_at_position(
     project_path: project::ProjectPath,
     target_position: Point,
@@ -3677,10 +3690,13 @@ fn open_editor_at_position(
 #[cfg(test)]
 mod tests {
     use assistant_tool::{ToolRegistry, ToolWorkingSet};
-    use editor::EditorSettings;
+    use editor::{EditorSettings, display_map::CreaseMetadata};
     use fs::FakeFs;
     use gpui::{AppContext, TestAppContext, VisualTestContext};
-    use language_model::{LanguageModel, fake_provider::FakeLanguageModel};
+    use language_model::{
+        ConfiguredModel, LanguageModel, LanguageModelRegistry,
+        fake_provider::{FakeLanguageModel, FakeLanguageModelProvider},
+    };
     use project::Project;
     use prompt_store::PromptBuilder;
     use serde_json::json;
@@ -3741,6 +3757,87 @@ mod tests {
         assert!(!cx.read(|cx| workspace.read(cx).is_being_followed(CollaboratorId::Agent)));
     }

+    #[gpui::test]
+    async fn test_reinserting_creases_for_edited_message(cx: &mut TestAppContext) {
+        init_test_settings(cx);
+
+        let project = create_test_project(cx, json!({})).await;
+
+        let (cx, active_thread, _, thread, model) =
+            setup_test_environment(cx, project.clone()).await;
+        cx.update(|_, cx| {
+            LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
+                registry.set_default_model(
+                    Some(ConfiguredModel {
+                        provider: Arc::new(FakeLanguageModelProvider),
+                        model,
+                    }),
+                    cx,
+                );
+            });
+        });
+
+        let creases = vec![MessageCrease {
+            range: 14..22,
+            metadata: CreaseMetadata {
+                icon_path: "icon".into(),
+                label: "foo.txt".into(),
+            },
+            context: None,
+        }];
+
+        let message = thread.update(cx, |thread, cx| {
+            let message_id = thread.insert_user_message(
+                "Tell me about @foo.txt",
+                ContextLoadResult::default(),
+                None,
+                creases,
+                cx,
+            );
+            thread.message(message_id).cloned().unwrap()
+        });
+
+        active_thread.update_in(cx, |active_thread, window, cx| {
+            active_thread.start_editing_message(
+                message.id,
+                message.segments.as_slice(),
+                message.creases.as_slice(),
+                window,
+                cx,
+            );
+            let editor = active_thread
+                .editing_message
+                .as_ref()
+                .unwrap()
+                .1
+                .editor
+                .clone();
+            editor.update(cx, |editor, cx| editor.edit([(0..13, "modified")], cx));
+            active_thread.confirm_editing_message(&Default::default(), window, cx);
+        });
+        cx.run_until_parked();
+
+        let message = thread.update(cx, |thread, _| thread.message(message.id).cloned().unwrap());
+        active_thread.update_in(cx, |active_thread, window, cx| {
+            active_thread.start_editing_message(
+                message.id,
+                message.segments.as_slice(),
+                message.creases.as_slice(),
+                window,
+                cx,
+            );
+            let editor = active_thread
+                .editing_message
+                .as_ref()
+                .unwrap()
+                .1
+                .editor
+                .clone();
+            let text = editor.update(cx, |editor, cx| editor.text(cx));
+            assert_eq!(text, "modified @foo.txt");
+        });
+    }
+
     fn init_test_settings(cx: &mut TestAppContext) {
         cx.update(|cx| {
             let settings_store = SettingsStore::test(cx);
agent crate root:

@@ -33,9 +33,11 @@ use assistant_slash_command::SlashCommandRegistry;
 use client::Client;
 use feature_flags::FeatureFlagAppExt as _;
 use fs::Fs;
-use gpui::{App, actions, impl_actions};
+use gpui::{App, Entity, actions, impl_actions};
 use language::LanguageRegistry;
-use language_model::{LanguageModelId, LanguageModelProviderId, LanguageModelRegistry};
+use language_model::{
+    ConfiguredModel, LanguageModel, LanguageModelId, LanguageModelProviderId, LanguageModelRegistry,
+};
 use prompt_store::PromptBuilder;
 use schemars::JsonSchema;
 use serde::Deserialize;
@@ -115,6 +117,28 @@ impl ManageProfiles {

 impl_actions!(agent, [NewThread, ManageProfiles]);

+#[derive(Clone)]
+pub(crate) enum ModelUsageContext {
+    Thread(Entity<Thread>),
+    InlineAssistant,
+}
+
+impl ModelUsageContext {
+    pub fn configured_model(&self, cx: &App) -> Option<ConfiguredModel> {
+        match self {
+            Self::Thread(thread) => thread.read(cx).configured_model(),
+            Self::InlineAssistant => {
+                LanguageModelRegistry::read_global(cx).inline_assistant_model()
+            }
+        }
+    }
+
+    pub fn language_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
+        self.configured_model(cx)
+            .map(|configured_model| configured_model.model)
+    }
+}
+
 /// Initializes the `agent` crate.
 pub fn init(
     fs: Arc<dyn Fs>,
AgentDiff:

@@ -1086,7 +1086,7 @@ impl Render for AgentDiffToolbar {
             .child(vertical_divider())
             .when_some(editor.read(cx).workspace(), |this, _workspace| {
                 this.child(
-                    IconButton::new("review", IconName::ListCollapse)
+                    IconButton::new("review", IconName::ListTodo)
                         .icon_size(IconSize::Small)
                         .tooltip(Tooltip::for_action_title_in(
                             "Review All Files",
@@ -1116,8 +1116,13 @@ impl Render for AgentDiffToolbar {
             return Empty.into_any();
         };

-        let is_generating = agent_diff.read(cx).thread.read(cx).is_generating();
-        if is_generating {
+        let has_pending_edit_tool_use = agent_diff
+            .read(cx)
+            .thread
+            .read(cx)
+            .has_pending_edit_tool_uses();
+
+        if has_pending_edit_tool_use {
             return div().px_2().child(spinner_icon).into_any();
         }

@@ -1507,7 +1512,7 @@ impl AgentDiff {
             multibuffer.add_diff(diff_handle.clone(), cx);
         });

-        let new_state = if thread.read(cx).is_generating() {
+        let new_state = if thread.read(cx).has_pending_edit_tool_uses() {
             EditorState::Generating
         } else {
             EditorState::Reviewing
agent_model_selector:

@@ -3,7 +3,7 @@ use fs::Fs;
 use gpui::{Entity, FocusHandle, SharedString};
 use picker::popover_menu::PickerPopoverMenu;

-use crate::Thread;
+use crate::ModelUsageContext;
 use assistant_context_editor::language_model_selector::{
     LanguageModelSelector, ToggleModelSelector, language_model_selector,
 };
@@ -12,12 +12,6 @@ use settings::update_settings_file;
 use std::sync::Arc;
 use ui::{PopoverMenuHandle, Tooltip, prelude::*};

-#[derive(Clone)]
-pub enum ModelType {
-    Default(Entity<Thread>),
-    InlineAssistant,
-}
-
 pub struct AgentModelSelector {
     selector: Entity<LanguageModelSelector>,
     menu_handle: PopoverMenuHandle<LanguageModelSelector>,
@@ -29,7 +23,7 @@ impl AgentModelSelector {
         fs: Arc<dyn Fs>,
         menu_handle: PopoverMenuHandle<LanguageModelSelector>,
         focus_handle: FocusHandle,
-        model_type: ModelType,
+        model_usage_context: ModelUsageContext,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Self {
@@ -38,19 +32,14 @@ impl AgentModelSelector {
             let fs = fs.clone();
             language_model_selector(
                 {
-                    let model_type = model_type.clone();
-                    move |cx| match &model_type {
-                        ModelType::Default(thread) => thread.read(cx).configured_model(),
-                        ModelType::InlineAssistant => {
-                            LanguageModelRegistry::read_global(cx).inline_assistant_model()
-                        }
-                    }
+                    let model_context = model_usage_context.clone();
+                    move |cx| model_context.configured_model(cx)
                 },
                 move |model, cx| {
                     let provider = model.provider_id().0.to_string();
                     let model_id = model.id().0.to_string();
-                    match &model_type {
-                        ModelType::Default(thread) => {
+                    match &model_usage_context {
+                        ModelUsageContext::Thread(thread) => {
                             thread.update(cx, |thread, cx| {
                                 let registry = LanguageModelRegistry::read_global(cx);
                                 if let Some(provider) = registry.provider(&model.provider_id())
@@ -72,7 +61,7 @@ impl AgentModelSelector {
                                 },
                             );
                         }
-                        ModelType::InlineAssistant => {
+                        ModelUsageContext::InlineAssistant => {
                             update_settings_file::<AgentSettings>(
                                 fs.clone(),
                                 cx,
context (ImageContext):

@@ -745,6 +745,7 @@ pub struct ImageContext {
 pub enum ImageStatus {
     Loading,
     Error,
+    Warning,
     Ready,
 }

@@ -761,11 +762,17 @@ impl ImageContext {
         self.image_task.clone().now_or_never().flatten()
     }

-    pub fn status(&self) -> ImageStatus {
+    pub fn status(&self, model: Option<&Arc<dyn language_model::LanguageModel>>) -> ImageStatus {
         match self.image_task.clone().now_or_never() {
             None => ImageStatus::Loading,
             Some(None) => ImageStatus::Error,
-            Some(Some(_)) => ImageStatus::Ready,
+            Some(Some(_)) => {
+                if model.is_some_and(|model| !model.supports_images()) {
+                    ImageStatus::Warning
+                } else {
+                    ImageStatus::Ready
+                }
+            }
         }
     }
context_picker (ContextPickerCompletionProvider):

@@ -926,8 +926,9 @@ impl CompletionProvider for ContextPickerCompletionProvider {
         &self,
         buffer: &Entity<language::Buffer>,
         position: language::Anchor,
-        _: &str,
-        _: bool,
+        _text: &str,
+        _trigger_in_words: bool,
+        _menu_is_open: bool,
         cx: &mut Context<Editor>,
     ) -> bool {
         let buffer = buffer.read(cx);
ContextServerTool:

@@ -51,6 +51,10 @@ impl Tool for ContextServerTool {
         true
     }

+    fn may_perform_edits(&self) -> bool {
+        true
+    }
+
     fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
         let mut schema = self.tool.input_schema.clone();
         assistant_tool::adapt_schema_to_format(&mut schema, format)?;
context_strip (ContextStrip):

@@ -23,7 +23,7 @@ use crate::thread_store::{TextThreadStore, ThreadStore};
 use crate::ui::{AddedContext, ContextPill};
 use crate::{
     AcceptSuggestedContext, AgentPanel, FocusDown, FocusLeft, FocusRight, FocusUp,
-    RemoveAllContext, RemoveFocusedContext, ToggleContextPicker,
+    ModelUsageContext, RemoveAllContext, RemoveFocusedContext, ToggleContextPicker,
 };

 pub struct ContextStrip {
@@ -37,6 +37,7 @@ pub struct ContextStrip {
     _subscriptions: Vec<Subscription>,
     focused_index: Option<usize>,
     children_bounds: Option<Vec<Bounds<Pixels>>>,
+    model_usage_context: ModelUsageContext,
 }

 impl ContextStrip {
@@ -47,6 +48,7 @@ impl ContextStrip {
         text_thread_store: Option<WeakEntity<TextThreadStore>>,
         context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
         suggest_context_kind: SuggestContextKind,
+        model_usage_context: ModelUsageContext,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Self {
@@ -81,6 +83,7 @@ impl ContextStrip {
             _subscriptions: subscriptions,
             focused_index: None,
             children_bounds: None,
+            model_usage_context,
         }
     }

@@ -98,11 +101,20 @@ impl ContextStrip {
             .as_ref()
             .and_then(|thread_store| thread_store.upgrade())
             .and_then(|thread_store| thread_store.read(cx).prompt_store().as_ref());

+        let current_model = self.model_usage_context.language_model(cx);
+
         self.context_store
             .read(cx)
             .context()
             .flat_map(|context| {
-                AddedContext::new_pending(context.clone(), prompt_store, project, cx)
+                AddedContext::new_pending(
+                    context.clone(),
+                    prompt_store,
+                    project,
+                    current_model.as_ref(),
+                    cx,
+                )
             })
             .collect::<Vec<_>>()
     } else {
PromptEditor (inline assistant):

@@ -1,4 +1,4 @@
-use crate::agent_model_selector::{AgentModelSelector, ModelType};
+use crate::agent_model_selector::AgentModelSelector;
 use crate::buffer_codegen::BufferCodegen;
 use crate::context::ContextCreasesAddon;
 use crate::context_picker::{ContextPicker, ContextPickerCompletionProvider};
@@ -7,12 +7,13 @@ use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
 use crate::message_editor::{extract_message_creases, insert_message_creases};
 use crate::terminal_codegen::TerminalCodegen;
 use crate::thread_store::{TextThreadStore, ThreadStore};
-use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist};
+use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext};
 use crate::{RemoveAllContext, ToggleContextPicker};
 use assistant_context_editor::language_model_selector::ToggleModelSelector;
 use client::ErrorExt;
 use collections::VecDeque;
 use db::kvp::Dismissable;
+use editor::actions::Paste;
 use editor::display_map::EditorMargins;
 use editor::{
     ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, MultiBuffer,
@@ -99,6 +100,7 @@ impl<T: 'static> Render for PromptEditor<T> {

         v_flex()
             .key_context("PromptEditor")
+            .capture_action(cx.listener(Self::paste))
             .bg(cx.theme().colors().editor_background)
             .block_mouse_except_scroll()
             .gap_0p5()
@@ -303,6 +305,10 @@ impl<T: 'static> PromptEditor<T> {
         self.editor.read(cx).text(cx)
     }

+    fn paste(&mut self, _: &Paste, _window: &mut Window, cx: &mut Context<Self>) {
+        crate::active_thread::attach_pasted_images_as_context(&self.context_store, cx);
+    }
+
     fn toggle_rate_limit_notice(
         &mut self,
         _: &ClickEvent,
@@ -912,6 +918,7 @@ impl PromptEditor<BufferCodegen> {
             text_thread_store.clone(),
             context_picker_menu_handle.clone(),
             SuggestContextKind::Thread,
+            ModelUsageContext::InlineAssistant,
             window,
             cx,
         )
@@ -930,7 +937,7 @@ impl PromptEditor<BufferCodegen> {
             fs,
             model_selector_menu_handle,
             prompt_editor.focus_handle(cx),
-            ModelType::InlineAssistant,
+            ModelUsageContext::InlineAssistant,
             window,
             cx,
         )
@@ -1083,6 +1090,7 @@ impl PromptEditor<TerminalCodegen> {
             text_thread_store.clone(),
             context_picker_menu_handle.clone(),
             SuggestContextKind::Thread,
+            ModelUsageContext::InlineAssistant,
             window,
             cx,
         )
@@ -1101,7 +1109,7 @@ impl PromptEditor<TerminalCodegen> {
             fs,
             model_selector_menu_handle.clone(),
             prompt_editor.focus_handle(cx),
-            ModelType::InlineAssistant,
+            ModelUsageContext::InlineAssistant,
             window,
             cx,
         )
message_editor (MessageEditor):

@@ -2,11 +2,11 @@ use std::collections::BTreeMap;
 use std::rc::Rc;
 use std::sync::Arc;

-use crate::agent_model_selector::{AgentModelSelector, ModelType};
+use crate::agent_model_selector::AgentModelSelector;
 use crate::context::{AgentContextKey, ContextCreasesAddon, ContextLoadResult, load_context};
 use crate::tool_compatibility::{IncompatibleToolsState, IncompatibleToolsTooltip};
 use crate::ui::{
-    AnimatedLabel, MaxModeTooltip,
+    MaxModeTooltip,
     preview::{AgentPreview, UsageCallout},
 };
 use agent_settings::{AgentSettings, CompletionMode};
@@ -24,10 +24,10 @@ use fs::Fs;
 use futures::future::Shared;
 use futures::{FutureExt as _, future};
 use gpui::{
-    Animation, AnimationExt, App, ClipboardEntry, Entity, EventEmitter, Focusable, Subscription,
-    Task, TextStyle, WeakEntity, linear_color_stop, linear_gradient, point, pulsating_between,
+    Animation, AnimationExt, App, Entity, EventEmitter, Focusable, Subscription, Task, TextStyle,
+    WeakEntity, linear_color_stop, linear_gradient, point, pulsating_between,
 };
-use language::{Buffer, Language};
+use language::{Buffer, Language, Point};
 use language_model::{
     ConfiguredModel, LanguageModelRequestMessage, MessageContent, RequestUsage,
     ZED_CLOUD_PROVIDER_ID,
@@ -51,9 +51,9 @@ use crate::profile_selector::ProfileSelector;
 use crate::thread::{MessageCrease, Thread, TokenUsageRatio};
 use crate::thread_store::{TextThreadStore, ThreadStore};
 use crate::{
-    ActiveThread, AgentDiffPane, Chat, ChatWithFollow, ExpandMessageEditor, Follow, NewThread,
-    OpenAgentDiff, RemoveAllContext, ToggleBurnMode, ToggleContextPicker, ToggleProfileSelector,
-    register_agent_preview,
+    ActiveThread, AgentDiffPane, Chat, ChatWithFollow, ExpandMessageEditor, Follow, KeepAll,
+    ModelUsageContext, NewThread, OpenAgentDiff, RejectAll, RemoveAllContext, ToggleBurnMode,
+    ToggleContextPicker, ToggleProfileSelector, register_agent_preview,
 };

 #[derive(RegisterComponent)]
@@ -169,6 +169,7 @@ impl MessageEditor {
             Some(text_thread_store.clone()),
             context_picker_menu_handle.clone(),
             SuggestContextKind::File,
+            ModelUsageContext::Thread(thread.clone()),
             window,
             cx,
         )
@@ -197,7 +198,7 @@ impl MessageEditor {
             fs.clone(),
             model_selector_menu_handle,
             editor.focus_handle(cx),
-            ModelType::Default(thread.clone()),
+            ModelUsageContext::Thread(thread.clone()),
             window,
             cx,
         )
@@ -431,39 +432,24 @@ impl MessageEditor {
     }

     fn paste(&mut self, _: &Paste, _: &mut Window, cx: &mut Context<Self>) {
-        let images = cx
-            .read_from_clipboard()
-            .map(|item| {
-                item.into_entries()
-                    .filter_map(|entry| {
-                        if let ClipboardEntry::Image(image) = entry {
-                            Some(image)
-                        } else {
-                            None
-                        }
-                    })
-                    .collect::<Vec<_>>()
-            })
-            .unwrap_or_default();
-
-        if images.is_empty() {
-            return;
-        }
-        cx.stop_propagation();
-
-        self.context_store.update(cx, |store, cx| {
-            for image in images {
-                store.add_image_instance(Arc::new(image), cx);
-            }
-        });
+        crate::active_thread::attach_pasted_images_as_context(&self.context_store, cx);
     }

     fn handle_review_click(&mut self, window: &mut Window, cx: &mut Context<Self>) {
         if self.thread.read(cx).has_pending_edit_tool_uses() {
             return;
         }

         self.edits_expanded = true;
         AgentDiffPane::deploy(self.thread.clone(), self.workspace.clone(), window, cx).log_err();
         cx.notify();
     }

+    fn handle_edit_bar_expand(&mut self, cx: &mut Context<Self>) {
+        self.edits_expanded = !self.edits_expanded;
+        cx.notify();
+    }
+
     fn handle_file_click(
         &self,
         buffer: Entity<Buffer>,
@@ -494,6 +480,40 @@ impl MessageEditor {
         });
     }

+    fn handle_accept_all(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
+        if self.thread.read(cx).has_pending_edit_tool_uses() {
+            return;
+        }
+
+        self.thread.update(cx, |thread, cx| {
+            thread.keep_all_edits(cx);
+        });
+        cx.notify();
+    }
+
+    fn handle_reject_all(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
+        if self.thread.read(cx).has_pending_edit_tool_uses() {
+            return;
+        }
+
+        // Since there's no reject_all_edits method in the thread API,
+        // we need to iterate through all buffers and reject their edits
+        let action_log = self.thread.read(cx).action_log().clone();
+        let changed_buffers = action_log.read(cx).changed_buffers(cx);
+
+        for (buffer, _) in changed_buffers {
+            self.thread.update(cx, |thread, cx| {
+                let buffer_snapshot = buffer.read(cx);
+                let start = buffer_snapshot.anchor_before(Point::new(0, 0));
+                let end = buffer_snapshot.anchor_after(buffer_snapshot.max_point());
+                thread
+                    .reject_edits_in_ranges(buffer, vec![start..end], cx)
+                    .detach();
+            });
+        }
+        cx.notify();
+    }
+
     fn render_max_mode_toggle(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
         let thread = self.thread.read(cx);
         let model = thread.configured_model();
@@ -615,6 +635,12 @@ impl MessageEditor {
             .on_action(cx.listener(Self::move_up))
             .on_action(cx.listener(Self::expand_message_editor))
             .on_action(cx.listener(Self::toggle_burn_mode))
+            .on_action(
+                cx.listener(|this, _: &KeepAll, window, cx| this.handle_accept_all(window, cx)),
+            )
+            .on_action(
+                cx.listener(|this, _: &RejectAll, window, cx| this.handle_reject_all(window, cx)),
+            )
             .capture_action(cx.listener(Self::paste))
             .gap_2()
             .p_2()
@@ -870,7 +896,10 @@ impl MessageEditor {
         let bg_edit_files_disclosure = editor_bg_color.blend(active_color.opacity(0.3));

         let is_edit_changes_expanded = self.edits_expanded;
-        let is_generating = self.thread.read(cx).is_generating();
+        let thread = self.thread.read(cx);
+        let pending_edits = thread.has_pending_edit_tool_uses();
+
+        const EDIT_NOT_READY_TOOLTIP_LABEL: &str = "Wait until file edits are complete.";

         v_flex()
             .mt_1()
@@ -888,31 +917,28 @@ impl MessageEditor {
             }])
             .child(
                 h_flex()
-                    .id("edits-container")
-                    .cursor_pointer()
-                    .p_1p5()
+                    .p_1()
                     .justify_between()
                     .when(is_edit_changes_expanded, |this| {
                         this.border_b_1().border_color(border_color)
                     })
-                    .on_click(
-                        cx.listener(|this, _, window, cx| this.handle_review_click(window, cx)),
-                    )
                     .child(
                         h_flex()
+                            .id("edits-container")
+                            .cursor_pointer()
                             .w_full()
                             .gap_1()
                             .child(
                                 Disclosure::new("edits-disclosure", is_edit_changes_expanded)
-                                    .on_click(cx.listener(|this, _ev, _window, cx| {
-                                        this.edits_expanded = !this.edits_expanded;
-                                        cx.notify();
+                                    .on_click(cx.listener(|this, _, _, cx| {
+                                        this.handle_edit_bar_expand(cx)
                                     })),
                             )
                             .map(|this| {
-                                if is_generating {
+                                if pending_edits {
                                     this.child(
-                                        AnimatedLabel::new(format!(
-                                            "Editing {} {}",
+                                        Label::new(format!(
+                                            "Editing {} {}…",
                                             changed_buffers.len(),
                                             if changed_buffers.len() == 1 {
                                                 "file"
@@ -920,7 +946,15 @@ impl MessageEditor {
                                                 "files"
                                             }
                                         ))
-                                        .size(LabelSize::Small),
+                                        .color(Color::Muted)
+                                        .size(LabelSize::Small)
+                                        .with_animation(
+                                            "edit-label",
+                                            Animation::new(Duration::from_secs(2))
+                                                .repeat()
+                                                .with_easing(pulsating_between(0.3, 0.7)),
+                                            |label, delta| label.alpha(delta),
+                                        ),
                                     )
                                 } else {
                                     this.child(
@@ -945,23 +979,74 @@ impl MessageEditor {
                                             .color(Color::Muted),
                                     )
                                 }
-                            }),
+                            })
+                            .on_click(
+                                cx.listener(|this, _, _, cx| this.handle_edit_bar_expand(cx)),
+                            ),
                     )
                     .child(
-                        Button::new("review", "Review Changes")
-                            .label_size(LabelSize::Small)
-                            .key_binding(
-                                KeyBinding::for_action_in(
-                                    &OpenAgentDiff,
-                                    &focus_handle,
-                                    window,
-                                    cx,
-                                )
-                                .map(|kb| kb.size(rems_from_px(12.))),
-                            )
-                            .on_click(cx.listener(|this, _, window, cx| {
-                                this.handle_review_click(window, cx)
-                            })),
+                        h_flex()
+                            .gap_1()
+                            .child(
+                                IconButton::new("review-changes", IconName::ListTodo)
+                                    .icon_size(IconSize::Small)
+                                    .tooltip({
+                                        let focus_handle = focus_handle.clone();
+                                        move |window, cx| {
+                                            Tooltip::for_action_in(
+                                                "Review Changes",
+                                                &OpenAgentDiff,
+                                                &focus_handle,
+                                                window,
+                                                cx,
+                                            )
+                                        }
+                                    })
+                                    .on_click(cx.listener(|this, _, window, cx| {
+                                        this.handle_review_click(window, cx)
+                                    })),
+                            )
+                            .child(ui::Divider::vertical().color(ui::DividerColor::Border))
+                            .child(
+                                Button::new("reject-all-changes", "Reject All")
+                                    .label_size(LabelSize::Small)
+                                    .disabled(pending_edits)
+                                    .when(pending_edits, |this| {
+                                        this.tooltip(Tooltip::text(EDIT_NOT_READY_TOOLTIP_LABEL))
+                                    })
+                                    .key_binding(
+                                        KeyBinding::for_action_in(
+                                            &RejectAll,
+                                            &focus_handle.clone(),
+                                            window,
+                                            cx,
+                                        )
+                                        .map(|kb| kb.size(rems_from_px(10.))),
+                                    )
+                                    .on_click(cx.listener(|this, _, window, cx| {
+                                        this.handle_reject_all(window, cx)
+                                    })),
+                            )
+                            .child(
+                                Button::new("accept-all-changes", "Accept All")
+                                    .label_size(LabelSize::Small)
+                                    .disabled(pending_edits)
+                                    .when(pending_edits, |this| {
+                                        this.tooltip(Tooltip::text(EDIT_NOT_READY_TOOLTIP_LABEL))
+                                    })
+                                    .key_binding(
+                                        KeyBinding::for_action_in(
+                                            &KeepAll,
+                                            &focus_handle,
+                                            window,
+                                            cx,
+                                        )
+                                        .map(|kb| kb.size(rems_from_px(10.))),
+                                    )
+                                    .on_click(cx.listener(|this, _, window, cx| {
+                                        this.handle_accept_all(window, cx)
+                                    })),
+                            ),
                     ),
             )
             .when(is_edit_changes_expanded, |parent| {
thread (Thread):

@@ -871,7 +871,16 @@ impl Thread {
         self.tool_use
             .pending_tool_uses()
             .iter()
-            .all(|tool_use| tool_use.status.is_error())
+            .all(|pending_tool_use| pending_tool_use.status.is_error())
     }

+    /// Returns whether any pending tool uses may perform edits
+    pub fn has_pending_edit_tool_uses(&self) -> bool {
+        self.tool_use
+            .pending_tool_uses()
+            .iter()
+            .filter(|pending_tool_use| !pending_tool_use.status.is_error())
+            .any(|pending_tool_use| pending_tool_use.may_perform_edits)
+    }
+
     pub fn tool_uses_for_message(&self, id: MessageId, cx: &App) -> Vec<ToolUse> {
@@ -1023,6 +1032,7 @@ impl Thread {
         id: MessageId,
         new_role: Role,
         new_segments: Vec<MessageSegment>,
+        creases: Vec<MessageCrease>,
         loaded_context: Option<LoadedContext>,
         checkpoint: Option<GitStoreCheckpoint>,
         cx: &mut Context<Self>,
@@ -1032,6 +1042,7 @@ impl Thread {
         };
         message.role = new_role;
         message.segments = new_segments;
+        message.creases = creases;
         if let Some(context) = loaded_context {
             message.loaded_context = context;
         }
@@ -70,13 +70,15 @@ impl Column for DataType {
|
||||
}
|
||||
}
|
||||
|
||||
const RULES_FILE_NAMES: [&'static str; 6] = [
|
||||
const RULES_FILE_NAMES: [&'static str; 8] = [
|
||||
".rules",
|
||||
".cursorrules",
|
||||
".windsurfrules",
|
||||
".clinerules",
|
||||
".github/copilot-instructions.md",
|
||||
"CLAUDE.md",
|
||||
"AGENT.md",
|
||||
"AGENTS.md",
|
||||
];
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
|
||||
@@ -337,6 +337,12 @@ impl ToolUseState {
|
||||
)
|
||||
.into();
|
||||
|
||||
let may_perform_edits = self
|
||||
.tools
|
||||
.read(cx)
|
||||
.tool(&tool_use.name, cx)
|
||||
.is_some_and(|tool| tool.may_perform_edits());
|
||||
|
||||
self.pending_tool_uses_by_id.insert(
|
||||
tool_use.id.clone(),
|
||||
PendingToolUse {
|
||||
@@ -345,6 +351,7 @@ impl ToolUseState {
|
||||
name: tool_use.name.clone(),
|
||||
ui_text: ui_text.clone(),
|
||||
input: tool_use.input,
|
||||
may_perform_edits,
|
||||
status,
|
||||
},
|
||||
);
|
||||
@@ -518,6 +525,7 @@ pub struct PendingToolUse {
|
||||
pub ui_text: Arc<str>,
|
||||
pub input: serde_json::Value,
|
||||
pub status: PendingToolUseStatus,
|
||||
pub may_perform_edits: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
||||
@@ -93,20 +93,9 @@ impl ContextPill {
Self::Suggested {
icon_path: Some(icon_path),
..
}
| Self::Added {
context:
AddedContext {
icon_path: Some(icon_path),
..
},
..
} => Icon::from_path(icon_path),
Self::Suggested { kind, .. }
| Self::Added {
context: AddedContext { kind, .. },
..
} => Icon::new(kind.icon()),
Self::Suggested { kind, .. } => Icon::new(kind.icon()),
Self::Added { context, .. } => context.icon(),
}
}
}

@@ -133,6 +122,7 @@ impl RenderOnce for ContextPill {
on_click,
} => {
let status_is_error = matches!(context.status, ContextStatus::Error { .. });
let status_is_warning = matches!(context.status, ContextStatus::Warning { .. });

base_pill
.pr(if on_remove.is_some() { px(2.) } else { px(4.) })

@@ -140,6 +130,9 @@ impl RenderOnce for ContextPill {
if status_is_error {
pill.bg(cx.theme().status().error_background)
.border_color(cx.theme().status().error_border)
} else if status_is_warning {
pill.bg(cx.theme().status().warning_background)
.border_color(cx.theme().status().warning_border)
} else if *focused {
pill.bg(color.element_background)
.border_color(color.border_focused)

@@ -195,7 +188,8 @@ impl RenderOnce for ContextPill {
|label, delta| label.opacity(delta),
)
.into_any_element(),
ContextStatus::Error { message } => element
ContextStatus::Warning { message }
| ContextStatus::Error { message } => element
.tooltip(ui::Tooltip::text(message.clone()))
.into_any_element(),
}),

@@ -270,6 +264,7 @@ pub enum ContextStatus {
Ready,
Loading { message: SharedString },
Error { message: SharedString },
Warning { message: SharedString },
}

#[derive(RegisterComponent)]

@@ -285,6 +280,19 @@ pub struct AddedContext {
}

impl AddedContext {
pub fn icon(&self) -> Icon {
match &self.status {
ContextStatus::Warning { .. } => Icon::new(IconName::Warning).color(Color::Warning),
ContextStatus::Error { .. } => Icon::new(IconName::XCircle).color(Color::Error),
_ => {
if let Some(icon_path) = &self.icon_path {
Icon::from_path(icon_path)
} else {
Icon::new(self.kind.icon())
}
}
}
}
/// Creates an `AddedContext` by retrieving relevant details of the `AgentContext`. This returns
/// `None` if the `DirectoryContext` or `RulesContext` no longer exists.
///
@@ -293,6 +301,7 @@ impl AddedContext {
handle: AgentContextHandle,
prompt_store: Option<&Entity<PromptStore>>,
project: &Project,
model: Option<&Arc<dyn language_model::LanguageModel>>,
cx: &App,
) -> Option<AddedContext> {
match handle {

@@ -304,11 +313,15 @@ impl AddedContext {
AgentContextHandle::Thread(handle) => Some(Self::pending_thread(handle, cx)),
AgentContextHandle::TextThread(handle) => Some(Self::pending_text_thread(handle, cx)),
AgentContextHandle::Rules(handle) => Self::pending_rules(handle, prompt_store, cx),
AgentContextHandle::Image(handle) => Some(Self::image(handle, cx)),
AgentContextHandle::Image(handle) => Some(Self::image(handle, model, cx)),
}
}

pub fn new_attached(context: &AgentContext, cx: &App) -> AddedContext {
pub fn new_attached(
context: &AgentContext,
model: Option<&Arc<dyn language_model::LanguageModel>>,
cx: &App,
) -> AddedContext {
match context {
AgentContext::File(context) => Self::attached_file(context, cx),
AgentContext::Directory(context) => Self::attached_directory(context),

@@ -318,7 +331,7 @@ impl AddedContext {
AgentContext::Thread(context) => Self::attached_thread(context),
AgentContext::TextThread(context) => Self::attached_text_thread(context),
AgentContext::Rules(context) => Self::attached_rules(context),
AgentContext::Image(context) => Self::image(context.clone(), cx),
AgentContext::Image(context) => Self::image(context.clone(), model, cx),
}
}

@@ -593,7 +606,11 @@ impl AddedContext {
}
}

fn image(context: ImageContext, cx: &App) -> AddedContext {
fn image(
context: ImageContext,
model: Option<&Arc<dyn language_model::LanguageModel>>,
cx: &App,
) -> AddedContext {
let (name, parent, icon_path) = if let Some(full_path) = context.full_path.as_ref() {
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
let (name, parent) =

@@ -604,21 +621,30 @@ impl AddedContext {
("Image".into(), None, None)
};

let status = match context.status(model) {
ImageStatus::Loading => ContextStatus::Loading {
message: "Loading…".into(),
},
ImageStatus::Error => ContextStatus::Error {
message: "Failed to load Image".into(),
},
ImageStatus::Warning => ContextStatus::Warning {
message: format!(
"{} doesn't support attaching Images as Context",
model.map(|m| m.name().0).unwrap_or_else(|| "Model".into())
)
.into(),
},
ImageStatus::Ready => ContextStatus::Ready,
};

AddedContext {
kind: ContextKind::Image,
name,
parent,
tooltip: None,
icon_path,
status: match context.status() {
ImageStatus::Loading => ContextStatus::Loading {
message: "Loading…".into(),
},
ImageStatus::Error => ContextStatus::Error {
message: "Failed to load image".into(),
},
ImageStatus::Ready => ContextStatus::Ready,
},
status,
render_hover: Some(Rc::new({
let image = context.original_image.clone();
move |_, cx| {

@@ -787,6 +813,7 @@ impl Component for AddedContext {
original_image: Arc::new(Image::empty()),
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
},
None,
cx,
),
);

@@ -806,6 +833,7 @@ impl Component for AddedContext {
})
.shared(),
},
None,
cx,
),
);

@@ -820,6 +848,7 @@ impl Component for AddedContext {
original_image: Arc::new(Image::empty()),
image_task: Task::ready(None).shared(),
},
None,
cx,
),
);

@@ -841,3 +870,60 @@ impl Component for AddedContext {
)
}
}

#[cfg(test)]
mod tests {
use super::*;
use gpui::App;
use language_model::{LanguageModel, fake_provider::FakeLanguageModel};
use std::sync::Arc;

#[gpui::test]
fn test_image_context_warning_for_unsupported_model(cx: &mut App) {
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel::default());
assert!(!model.supports_images());

let image_context = ImageContext {
context_id: ContextId::zero(),
project_path: None,
original_image: Arc::new(Image::empty()),
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
full_path: None,
};

let added_context = AddedContext::image(image_context, Some(&model), cx);

assert!(matches!(
added_context.status,
ContextStatus::Warning { .. }
));

assert!(matches!(added_context.kind, ContextKind::Image));
assert_eq!(added_context.name.as_ref(), "Image");
assert!(added_context.parent.is_none());
assert!(added_context.icon_path.is_none());
}

#[gpui::test]
fn test_image_context_ready_for_no_model(cx: &mut App) {
let image_context = ImageContext {
context_id: ContextId::zero(),
project_path: None,
original_image: Arc::new(Image::empty()),
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
full_path: None,
};

let added_context = AddedContext::image(image_context, None, cx);

assert!(
matches!(added_context.status, ContextStatus::Ready),
"Expected ready status when no model provided"
);

assert!(matches!(added_context.kind, ContextKind::Image));
assert_eq!(added_context.name.as_ref(), "Image");
assert!(added_context.parent.is_none());
assert!(added_context.icon_path.is_none());
}
}

@@ -342,6 +342,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
position: language::Anchor,
_text: &str,
_trigger_in_words: bool,
_menu_is_open: bool,
cx: &mut Context<Editor>,
) -> bool {
let buffer = buffer.read(cx);

@@ -13,6 +13,7 @@ path = "src/assistant_tool.rs"

[dependencies]
anyhow.workspace = true
async-watch.workspace = true
buffer_diff.workspace = true
clock.workspace = true
collections.workspace = true

@@ -1,7 +1,7 @@
use anyhow::{Context as _, Result};
use buffer_diff::BufferDiff;
use collections::BTreeMap;
use futures::{StreamExt, channel::mpsc};
use futures::{FutureExt, StreamExt, channel::mpsc};
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};

@@ -92,21 +92,21 @@ impl ActionLog {
let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
let diff_base;
let unreviewed_changes;
let unreviewed_edits;
if is_created {
diff_base = Rope::default();
unreviewed_changes = Patch::new(vec![Edit {
unreviewed_edits = Patch::new(vec![Edit {
old: 0..1,
new: 0..text_snapshot.max_point().row + 1,
}])
} else {
diff_base = buffer.read(cx).as_rope().clone();
unreviewed_changes = Patch::default();
unreviewed_edits = Patch::default();
}
TrackedBuffer {
buffer: buffer.clone(),
diff_base,
unreviewed_changes,
unreviewed_edits: unreviewed_edits,
snapshot: text_snapshot.clone(),
status,
version: buffer.read(cx).version(),

@@ -175,7 +175,7 @@ impl ActionLog {
.map_or(false, |file| file.disk_state() != DiskState::Deleted)
{
// If the buffer had been deleted by a tool, but it got
// resurrected externally, we want to clear the changes we
// resurrected externally, we want to clear the edits we
// were tracking and reset the buffer's state.
self.tracked_buffers.remove(&buffer);
self.track_buffer_internal(buffer, false, cx);

@@ -188,108 +188,274 @@ impl ActionLog {
async fn maintain_diff(
this: WeakEntity<Self>,
buffer: Entity<Buffer>,
mut diff_update: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
cx: &mut AsyncApp,
) -> Result<()> {
while let Some((author, buffer_snapshot)) = diff_update.next().await {
let (rebase, diff, language, language_registry) =
this.read_with(cx, |this, cx| {
let tracked_buffer = this
.tracked_buffers
.get(&buffer)
.context("buffer not tracked")?;
let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
let git_diff = this
.update(cx, |this, cx| {
this.project.update(cx, |project, cx| {
project.open_uncommitted_diff(buffer.clone(), cx)
})
})?
.await
.ok();
let buffer_repo = git_store.read_with(cx, |git_store, cx| {
git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
})?;

let rebase = cx.background_spawn({
let mut base_text = tracked_buffer.diff_base.clone();
let old_snapshot = tracked_buffer.snapshot.clone();
let new_snapshot = buffer_snapshot.clone();
let unreviewed_changes = tracked_buffer.unreviewed_changes.clone();
async move {
let edits = diff_snapshots(&old_snapshot, &new_snapshot);
if let ChangeAuthor::User = author {
apply_non_conflicting_edits(
&unreviewed_changes,
edits,
&mut base_text,
new_snapshot.as_rope(),
);
let (git_diff_updates_tx, mut git_diff_updates_rx) = async_watch::channel(());
let _repo_subscription =
if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
cx.update(|cx| {
let mut old_head = buffer_repo.read(cx).head_commit.clone();
Some(cx.subscribe(git_diff, move |_, event, cx| match event {
buffer_diff::BufferDiffEvent::DiffChanged { .. } => {
let new_head = buffer_repo.read(cx).head_commit.clone();
if new_head != old_head {
old_head = new_head;
git_diff_updates_tx.send(()).ok();
}
(Arc::new(base_text.to_string()), base_text)
}
});
_ => {}
}))
})?
} else {
None
};

anyhow::Ok((
rebase,
tracked_buffer.diff.clone(),
tracked_buffer.buffer.read(cx).language().cloned(),
tracked_buffer.buffer.read(cx).language_registry(),
))
})??;

let (new_base_text, new_diff_base) = rebase.await;
let diff_snapshot = BufferDiff::update_diff(
diff.clone(),
buffer_snapshot.clone(),
Some(new_base_text),
true,
false,
language,
language_registry,
cx,
)
.await;

let mut unreviewed_changes = Patch::default();
if let Ok(diff_snapshot) = diff_snapshot {
unreviewed_changes = cx
.background_spawn({
let diff_snapshot = diff_snapshot.clone();
let buffer_snapshot = buffer_snapshot.clone();
let new_diff_base = new_diff_base.clone();
async move {
let mut unreviewed_changes = Patch::default();
for hunk in diff_snapshot.hunks_intersecting_range(
Anchor::MIN..Anchor::MAX,
&buffer_snapshot,
) {
let old_range = new_diff_base
.offset_to_point(hunk.diff_base_byte_range.start)
..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
let new_range = hunk.range.start..hunk.range.end;
unreviewed_changes.push(point_to_row_edit(
Edit {
old: old_range,
new: new_range,
},
&new_diff_base,
&buffer_snapshot.as_rope(),
));
}
unreviewed_changes
}
})
.await;

diff.update(cx, |diff, cx| {
diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx)
})?;
loop {
futures::select_biased! {
buffer_update = buffer_updates.next() => {
if let Some((author, buffer_snapshot)) = buffer_update {
Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
} else {
break;
}
}
_ = git_diff_updates_rx.changed().fuse() => {
if let Some(git_diff) = git_diff.as_ref() {
Self::keep_committed_edits(&this, &buffer, &git_diff, cx).await?;
}
}
}
this.update(cx, |this, cx| {
let tracked_buffer = this
.tracked_buffers
.get_mut(&buffer)
.context("buffer not tracked")?;
tracked_buffer.diff_base = new_diff_base;
tracked_buffer.snapshot = buffer_snapshot;
tracked_buffer.unreviewed_changes = unreviewed_changes;
cx.notify();
anyhow::Ok(())
})??;
}

Ok(())
}

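The rewritten maintenance loop above multiplexes two event sources with `futures::select_biased!`, preferring buffer updates over git-head changes and exiting when the buffer-update channel closes. A minimal standalone sketch of that loop shape (hypothetical channels and payloads, not the real ActionLog types):

use futures::{StreamExt, channel::mpsc};

// Buffer updates win ties; the loop ends when their channel closes.
async fn pump(
    mut buffer_updates: mpsc::UnboundedReceiver<u64>,
    mut git_updates: mpsc::UnboundedReceiver<()>,
) {
    loop {
        futures::select_biased! {
            update = buffer_updates.next() => {
                match update {
                    Some(version) => println!("re-diff against buffer version {version}"),
                    None => break, // channel closed: stop maintaining the diff
                }
            }
            _ = git_updates.next() => {
                println!("git HEAD moved; fold committed edits into the diff base");
            }
        }
    }
}

fn main() {
    let (buffer_tx, buffer_rx) = mpsc::unbounded();
    let (_git_tx, git_rx) = mpsc::unbounded::<()>();
    buffer_tx.unbounded_send(1).ok();
    drop(buffer_tx);
    futures::executor::block_on(pump(buffer_rx, git_rx));
}
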
async fn track_edits(
this: &WeakEntity<ActionLog>,
buffer: &Entity<Buffer>,
author: ChangeAuthor,
buffer_snapshot: text::BufferSnapshot,
cx: &mut AsyncApp,
) -> Result<()> {
let rebase = this.read_with(cx, |this, cx| {
let tracked_buffer = this
.tracked_buffers
.get(buffer)
.context("buffer not tracked")?;

let rebase = cx.background_spawn({
let mut base_text = tracked_buffer.diff_base.clone();
let old_snapshot = tracked_buffer.snapshot.clone();
let new_snapshot = buffer_snapshot.clone();
let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
async move {
let edits = diff_snapshots(&old_snapshot, &new_snapshot);
if let ChangeAuthor::User = author {
apply_non_conflicting_edits(
&unreviewed_edits,
edits,
&mut base_text,
new_snapshot.as_rope(),
);
}
(Arc::new(base_text.to_string()), base_text)
}
});

anyhow::Ok(rebase)
})??;
let (new_base_text, new_diff_base) = rebase.await;
Self::update_diff(
this,
buffer,
buffer_snapshot,
new_base_text,
new_diff_base,
cx,
)
.await
}

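`track_edits` re-runs the "rebase" step above on every update: when the author is the user, edits that don't conflict with unreviewed agent edits are folded into the diff base itself, so they never show up for review. A toy byte-range rendering of that idea (assumed shapes; the real code goes through `apply_non_conflicting_edits` on a `Rope`):

use std::ops::Range;

// Fold a user edit into the base text unless it overlaps an
// unreviewed agent edit; returns whether the fold happened.
fn fold_user_edit(
    base: &mut String,
    unreviewed: &[Range<usize>],
    edit: Range<usize>,
    replacement: &str,
) -> bool {
    let conflicts = unreviewed
        .iter()
        .any(|r| r.start < edit.end && edit.start < r.end);
    if !conflicts {
        base.replace_range(edit, replacement);
    }
    !conflicts
}
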
async fn keep_committed_edits(
this: &WeakEntity<ActionLog>,
buffer: &Entity<Buffer>,
git_diff: &Entity<BufferDiff>,
cx: &mut AsyncApp,
) -> Result<()> {
let buffer_snapshot = this.read_with(cx, |this, _cx| {
let tracked_buffer = this
.tracked_buffers
.get(buffer)
.context("buffer not tracked")?;
anyhow::Ok(tracked_buffer.snapshot.clone())
})??;
let (new_base_text, new_diff_base) = this
.read_with(cx, |this, cx| {
let tracked_buffer = this
.tracked_buffers
.get(buffer)
.context("buffer not tracked")?;
let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
let agent_diff_base = tracked_buffer.diff_base.clone();
let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
let buffer_text = tracked_buffer.snapshot.as_rope().clone();
anyhow::Ok(cx.background_spawn(async move {
let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
let committed_edits = language::line_diff(
&agent_diff_base.to_string(),
&git_diff_base.to_string(),
)
.into_iter()
.map(|(old, new)| Edit { old, new });

let mut new_agent_diff_base = agent_diff_base.clone();
let mut row_delta = 0i32;
for committed in committed_edits {
while let Some(unreviewed) = old_unreviewed_edits.peek() {
// If the committed edit matches the unreviewed
// edit, assume the user wants to keep it.
if committed.old == unreviewed.old {
let unreviewed_new =
buffer_text.slice_rows(unreviewed.new.clone()).to_string();
let committed_new =
git_diff_base.slice_rows(committed.new.clone()).to_string();
if unreviewed_new == committed_new {
let old_byte_start =
new_agent_diff_base.point_to_offset(Point::new(
(unreviewed.old.start as i32 + row_delta) as u32,
0,
));
let old_byte_end =
new_agent_diff_base.point_to_offset(cmp::min(
Point::new(
(unreviewed.old.end as i32 + row_delta) as u32,
0,
),
new_agent_diff_base.max_point(),
));
new_agent_diff_base
.replace(old_byte_start..old_byte_end, &unreviewed_new);
row_delta +=
unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
}
} else if unreviewed.old.start >= committed.old.end {
break;
}

old_unreviewed_edits.next().unwrap();
}
}

(
Arc::new(new_agent_diff_base.to_string()),
new_agent_diff_base,
)
}))
})??
.await;

Self::update_diff(
this,
buffer,
buffer_snapshot,
new_base_text,
new_diff_base,
cx,
)
.await
}

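The pairing rule in the comment above is the heart of `keep_committed_edits`: a committed hunk absorbs an unreviewed hunk only when both cover the same old rows and produce the same new text. A simplified sketch of that comparison over row-range edits (toy types; the real code walks a `text::Patch` with a peekable iterator and tracks a row delta):

use std::ops::Range;

#[derive(Clone, Debug, PartialEq)]
struct RowEdit {
    old: Range<u32>,
    new: Range<u32>,
}

// Drop every unreviewed edit that some committed edit reproduces
// exactly; `same_new_text` stands in for the slice_rows comparison.
fn retain_unreviewed(
    unreviewed: Vec<RowEdit>,
    committed: &[RowEdit],
    same_new_text: impl Fn(&RowEdit, &RowEdit) -> bool,
) -> Vec<RowEdit> {
    unreviewed
        .into_iter()
        .filter(|u| {
            !committed
                .iter()
                .any(|c| c.old == u.old && same_new_text(u, c))
        })
        .collect()
}
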
async fn update_diff(
this: &WeakEntity<ActionLog>,
buffer: &Entity<Buffer>,
buffer_snapshot: text::BufferSnapshot,
new_base_text: Arc<String>,
new_diff_base: Rope,
cx: &mut AsyncApp,
) -> Result<()> {
let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
let tracked_buffer = this
.tracked_buffers
.get(buffer)
.context("buffer not tracked")?;
anyhow::Ok((
tracked_buffer.diff.clone(),
buffer.read(cx).language().cloned(),
buffer.read(cx).language_registry().clone(),
))
})??;
let diff_snapshot = BufferDiff::update_diff(
diff.clone(),
buffer_snapshot.clone(),
Some(new_base_text),
true,
false,
language,
language_registry,
cx,
)
.await;
let mut unreviewed_edits = Patch::default();
if let Ok(diff_snapshot) = diff_snapshot {
unreviewed_edits = cx
.background_spawn({
let diff_snapshot = diff_snapshot.clone();
let buffer_snapshot = buffer_snapshot.clone();
let new_diff_base = new_diff_base.clone();
async move {
let mut unreviewed_edits = Patch::default();
for hunk in diff_snapshot
.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
{
let old_range = new_diff_base
.offset_to_point(hunk.diff_base_byte_range.start)
..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
let new_range = hunk.range.start..hunk.range.end;
unreviewed_edits.push(point_to_row_edit(
Edit {
old: old_range,
new: new_range,
},
&new_diff_base,
&buffer_snapshot.as_rope(),
));
}
unreviewed_edits
}
})
.await;

diff.update(cx, |diff, cx| {
diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
})?;
}
this.update(cx, |this, cx| {
let tracked_buffer = this
.tracked_buffers
.get_mut(buffer)
.context("buffer not tracked")?;
tracked_buffer.diff_base = new_diff_base;
tracked_buffer.snapshot = buffer_snapshot;
tracked_buffer.unreviewed_edits = unreviewed_edits;
cx.notify();
anyhow::Ok(())
})?
}

/// Track a buffer as read, so we can notify the model about user edits.
pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
self.track_buffer_internal(buffer, false, cx);

@@ -350,7 +516,7 @@ impl ActionLog {
buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
let mut delta = 0i32;

tracked_buffer.unreviewed_changes.retain_mut(|edit| {
tracked_buffer.unreviewed_edits.retain_mut(|edit| {
edit.old.start = (edit.old.start as i32 + delta) as u32;
edit.old.end = (edit.old.end as i32 + delta) as u32;

@@ -461,7 +627,7 @@ impl ActionLog {
.project
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

// Clear all tracked changes for this buffer and start over as if we just read it.
// Clear all tracked edits for this buffer and start over as if we just read it.
self.tracked_buffers.remove(&buffer);
self.buffer_read(buffer.clone(), cx);
cx.notify();

@@ -477,7 +643,7 @@ impl ActionLog {
.peekable();

let mut edits_to_revert = Vec::new();
for edit in tracked_buffer.unreviewed_changes.edits() {
for edit in tracked_buffer.unreviewed_edits.edits() {
let new_range = tracked_buffer
.snapshot
.anchor_before(Point::new(edit.new.start, 0))

@@ -529,7 +695,7 @@ impl ActionLog {
.retain(|_buffer, tracked_buffer| match tracked_buffer.status {
TrackedBufferStatus::Deleted => false,
_ => {
tracked_buffer.unreviewed_changes.clear();
tracked_buffer.unreviewed_edits.clear();
tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
true

@@ -538,11 +704,11 @@ impl ActionLog {
cx.notify();
}

/// Returns the set of buffers that contain changes that haven't been reviewed by the user.
/// Returns the set of buffers that contain edits that haven't been reviewed by the user.
pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
self.tracked_buffers
.iter()
.filter(|(_, tracked)| tracked.has_changes(cx))
.filter(|(_, tracked)| tracked.has_edits(cx))
.map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
.collect()
}

@@ -662,11 +828,7 @@ fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edi
old: edit.old.start.row + 1..edit.old.end.row + 1,
new: edit.new.start.row + 1..edit.new.end.row + 1,
}
} else if edit.old.start.column == 0
&& edit.old.end.column == 0
&& edit.new.end.column == 0
&& edit.old.end != old_text.max_point()
{
} else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
Edit {
old: edit.old.start.row..edit.old.end.row,
new: edit.new.start.row..edit.new.end.row,
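
The changed branch above drops the `old_text.max_point()` guard: any edit whose endpoints all sit at column 0 now converts directly to half-open row ranges, while edits that cut into a line still widen to whole rows. A toy two-case rendering of that conversion (hypothetical standalone `Point`; the real function also has a dedicated branch for insertions at line ends):

use std::ops::Range;

#[derive(Clone, Copy)]
struct Point {
    row: u32,
    column: u32,
}

// Column-0 edits map to plain row ranges; anything else widens to
// cover every touched row (the `+ 1` arithmetic above).
fn point_to_rows(old: Range<Point>, new: Range<Point>) -> (Range<u32>, Range<u32>) {
    if old.start.column == 0 && old.end.column == 0 && new.end.column == 0 {
        (old.start.row..old.end.row, new.start.row..new.end.row)
    } else {
        (
            old.start.row..old.end.row + 1,
            new.start.row..new.end.row + 1,
        )
    }
}
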
@@ -694,7 +856,7 @@ enum TrackedBufferStatus {
struct TrackedBuffer {
buffer: Entity<Buffer>,
diff_base: Rope,
unreviewed_changes: Patch<u32>,
unreviewed_edits: Patch<u32>,
status: TrackedBufferStatus,
version: clock::Global,
diff: Entity<BufferDiff>,

@@ -706,7 +868,7 @@ struct TrackedBuffer {
}

impl TrackedBuffer {
fn has_changes(&self, cx: &App) -> bool {
fn has_edits(&self, cx: &App) -> bool {
self.diff
.read(cx)
.hunks(&self.buffer.read(cx), cx)

@@ -727,8 +889,6 @@ pub struct ChangedBuffer {

#[cfg(test)]
mod tests {
use std::env;

use super::*;
use buffer_diff::DiffHunkStatusKind;
use gpui::TestAppContext;

@@ -737,6 +897,7 @@ mod tests {
use rand::prelude::*;
use serde_json::json;
use settings::SettingsStore;
use std::env;
use util::{RandomCharIter, path};

#[ctor::ctor]

@@ -1751,15 +1912,15 @@ mod tests {
.unwrap();
}
_ => {
let is_agent_change = rng.gen_bool(0.5);
if is_agent_change {
let is_agent_edit = rng.gen_bool(0.5);
if is_agent_edit {
log::info!("agent edit");
} else {
log::info!("user edit");
}
cx.update(|cx| {
buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
if is_agent_change {
if is_agent_edit {
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
}
});

@@ -1784,7 +1945,7 @@ mod tests {
let tracked_buffer = log.tracked_buffers.get(&buffer).unwrap();
let mut old_text = tracked_buffer.diff_base.clone();
let new_text = buffer.read(cx).as_rope();
for edit in tracked_buffer.unreviewed_changes.edits() {
for edit in tracked_buffer.unreviewed_edits.edits() {
let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
let old_end = old_text.point_to_offset(cmp::min(
Point::new(edit.new.start + edit.old_len(), 0),

@@ -1800,6 +1961,171 @@ mod tests {
}
}

#[gpui::test]
async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
init_test(cx);

let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
path!("/project"),
json!({
".git": {},
"file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
}),
)
.await;
fs.set_head_for_repo(
path!("/project/.git").as_ref(),
&[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
"0000000",
);
cx.run_until_parked();

let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));

let file_path = project
.read_with(cx, |project, cx| {
project.find_project_path(path!("/project/file.txt"), cx)
})
.unwrap();
let buffer = project
.update(cx, |project, cx| project.open_buffer(file_path, cx))
.await
.unwrap();

cx.update(|cx| {
action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
buffer.update(cx, |buffer, cx| {
buffer.edit(
[
// Edit at the very start: a -> A
(Point::new(0, 0)..Point::new(0, 1), "A"),
// Deletion in the middle: remove lines d and e
(Point::new(3, 0)..Point::new(5, 0), ""),
// Modification: g -> GGG
(Point::new(6, 0)..Point::new(6, 1), "GGG"),
// Addition: insert new line after h
(Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
// Edit the very last character: j -> J
(Point::new(9, 0)..Point::new(9, 1), "J"),
],
None,
cx,
);
});
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
});
cx.run_until_parked();
assert_eq!(
unreviewed_hunks(&action_log, cx),
vec![(
buffer.clone(),
vec![
HunkStatus {
range: Point::new(0, 0)..Point::new(1, 0),
diff_status: DiffHunkStatusKind::Modified,
old_text: "a\n".into()
},
HunkStatus {
range: Point::new(3, 0)..Point::new(3, 0),
diff_status: DiffHunkStatusKind::Deleted,
old_text: "d\ne\n".into()
},
HunkStatus {
range: Point::new(4, 0)..Point::new(5, 0),
diff_status: DiffHunkStatusKind::Modified,
old_text: "g\n".into()
},
HunkStatus {
range: Point::new(6, 0)..Point::new(7, 0),
diff_status: DiffHunkStatusKind::Added,
old_text: "".into()
},
HunkStatus {
range: Point::new(8, 0)..Point::new(8, 1),
diff_status: DiffHunkStatusKind::Modified,
old_text: "j".into()
}
]
)]
);

// Simulate a git commit that matches some edits but not others:
// - Accepts the first edit (a -> A)
// - Accepts the deletion (remove d and e)
// - Makes a different change to g (g -> G instead of GGG)
// - Ignores the NEW line addition
// - Ignores the last line edit (j stays as j)
fs.set_head_for_repo(
path!("/project/.git").as_ref(),
&[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
"0000001",
);
cx.run_until_parked();
assert_eq!(
unreviewed_hunks(&action_log, cx),
vec![(
buffer.clone(),
vec![
HunkStatus {
range: Point::new(4, 0)..Point::new(5, 0),
diff_status: DiffHunkStatusKind::Modified,
old_text: "g\n".into()
},
HunkStatus {
range: Point::new(6, 0)..Point::new(7, 0),
diff_status: DiffHunkStatusKind::Added,
old_text: "".into()
},
HunkStatus {
range: Point::new(8, 0)..Point::new(8, 1),
diff_status: DiffHunkStatusKind::Modified,
old_text: "j".into()
}
]
)]
);

// Make another commit that accepts the NEW line but with different content
fs.set_head_for_repo(
path!("/project/.git").as_ref(),
&[(
"file.txt".into(),
"A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
)],
"0000002",
);
cx.run_until_parked();
assert_eq!(
unreviewed_hunks(&action_log, cx),
vec![(
buffer.clone(),
vec![
HunkStatus {
range: Point::new(6, 0)..Point::new(7, 0),
diff_status: DiffHunkStatusKind::Added,
old_text: "".into()
},
HunkStatus {
range: Point::new(8, 0)..Point::new(8, 1),
diff_status: DiffHunkStatusKind::Modified,
old_text: "j".into()
}
]
)]
);

// Final commit that accepts all remaining edits
fs.set_head_for_repo(
path!("/project/.git").as_ref(),
&[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
"0000003",
);
cx.run_until_parked();
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
}

#[derive(Debug, Clone, PartialEq, Eq)]
struct HunkStatus {
range: Range<Point>,

@@ -218,6 +218,9 @@ pub trait Tool: 'static + Send + Sync {
/// before having permission to run.
fn needs_confirmation(&self, input: &serde_json::Value, cx: &App) -> bool;

/// Returns true if the tool may perform edits.
fn may_perform_edits(&self) -> bool;

/// Returns the JSON schema that describes the tool's input.
fn input_schema(&self, _: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
Ok(serde_json::Value::Object(serde_json::Map::default()))

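Every tool now has to declare its edit capability alongside its confirmation policy; the per-tool hunks below show the real implementations. As a hedged, self-contained sketch of the same shape (the trait here is trimmed to the two hooks this change touches, not the full Zed `Tool` trait):

use serde_json::Value;

// Stand-in trait keeping only the confirmation/capability hooks.
trait ToolCapabilities {
    fn needs_confirmation(&self, input: &Value) -> bool;
    fn may_perform_edits(&self) -> bool;
}

struct ReadOnlySearchTool;

impl ToolCapabilities for ReadOnlySearchTool {
    fn needs_confirmation(&self, _input: &Value) -> bool {
        false // read-only: never gated behind a confirmation prompt
    }

    fn may_perform_edits(&self) -> bool {
        false // and never counted by has_pending_edit_tool_uses
    }
}
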
@@ -16,11 +16,24 @@ pub fn adapt_schema_to_format(
}

match format {
LanguageModelToolSchemaFormat::JsonSchema => Ok(()),
LanguageModelToolSchemaFormat::JsonSchema => preprocess_json_schema(json),
LanguageModelToolSchemaFormat::JsonSchemaSubset => adapt_to_json_schema_subset(json),
}
}

fn preprocess_json_schema(json: &mut Value) -> Result<()> {
// `additionalProperties` defaults to `false` unless explicitly specified.
// This prevents models from hallucinating tool parameters.
if let Value::Object(obj) = json {
if let Some(Value::String(type_str)) = obj.get("type") {
if type_str == "object" && !obj.contains_key("additionalProperties") {
obj.insert("additionalProperties".to_string(), Value::Bool(false));
}
}
}
Ok(())
}

/// Tries to adapt the json schema so that it is compatible with https://ai.google.dev/api/caching#Schema
fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
if let Value::Object(obj) = json {

@@ -237,4 +250,59 @@ mod tests {

assert!(adapt_to_json_schema_subset(&mut json).is_err());
}

#[test]
fn test_preprocess_json_schema_adds_additional_properties() {
let mut json = json!({
"type": "object",
"properties": {
"name": {
"type": "string"
}
}
});

preprocess_json_schema(&mut json).unwrap();

assert_eq!(
json,
json!({
"type": "object",
"properties": {
"name": {
"type": "string"
}
},
"additionalProperties": false
})
);
}

#[test]
fn test_preprocess_json_schema_preserves_additional_properties() {
let mut json = json!({
"type": "object",
"properties": {
"name": {
"type": "string"
}
},
"additionalProperties": true
});

preprocess_json_schema(&mut json).unwrap();

assert_eq!(
json,
json!({
"type": "object",
"properties": {
"name": {
"type": "string"
}
},
"additionalProperties": true
})
);
}
}

@@ -37,13 +37,13 @@ use crate::diagnostics_tool::DiagnosticsTool;
use crate::edit_file_tool::EditFileTool;
use crate::fetch_tool::FetchTool;
use crate::find_path_tool::FindPathTool;
use crate::grep_tool::GrepTool;
use crate::list_directory_tool::ListDirectoryTool;
use crate::now_tool::NowTool;
use crate::thinking_tool::ThinkingTool;

pub use edit_file_tool::{EditFileMode, EditFileToolInput};
pub use find_path_tool::FindPathToolInput;
pub use grep_tool::{GrepTool, GrepToolInput};
pub use open_tool::OpenTool;
pub use read_file_tool::{ReadFileTool, ReadFileToolInput};
pub use terminal_tool::TerminalTool;

@@ -126,6 +126,7 @@ mod tests {
}
},
"required": ["location"],
"additionalProperties": false
})
);
}

@@ -48,6 +48,10 @@ impl Tool for CopyPathTool {
false
}

fn may_perform_edits(&self) -> bool {
true
}

fn description(&self) -> String {
include_str!("./copy_path_tool/description.md").into()
}

@@ -33,12 +33,16 @@ impl Tool for CreateDirectoryTool {
"create_directory".into()
}

fn description(&self) -> String {
include_str!("./create_directory_tool/description.md").into()
}

fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
false
}

fn description(&self) -> String {
include_str!("./create_directory_tool/description.md").into()
fn may_perform_edits(&self) -> bool {
false
}

fn icon(&self) -> IconName {

@@ -37,6 +37,10 @@ impl Tool for DeletePathTool {
false
}

fn may_perform_edits(&self) -> bool {
true
}

fn description(&self) -> String {
include_str!("./delete_path_tool/description.md").into()
}

@@ -50,6 +50,10 @@ impl Tool for DiagnosticsTool {
false
}

fn may_perform_edits(&self) -> bool {
false
}

fn description(&self) -> String {
include_str!("./diagnostics_tool/description.md").into()
}

@@ -54,6 +54,7 @@ impl Template for EditFilePromptTemplate {
pub enum EditAgentOutputEvent {
ResolvingEditRange(Range<Anchor>),
UnresolvedEditRange,
AmbiguousEditRange(Vec<Range<usize>>),
Edited,
}

@@ -238,7 +239,7 @@ impl EditAgent {
let (output, edit_events) = Self::parse_edit_chunks(edit_chunks, cx);
let mut edit_events = edit_events.peekable();
while let Some(edit_event) = Pin::new(&mut edit_events).peek().await {
// Skip events until we're at the start of a new edit.
// Skip events until we're at the start of a new edit.
let Ok(EditParserEvent::OldTextChunk { .. }) = edit_event else {
edit_events.next().await.unwrap()?;
continue;

@@ -246,8 +247,8 @@ impl EditAgent {

let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;

// Resolve the old text in the background, updating the agent
// location as we keep refining which range it corresponds to.
// Resolve the old text in the background, updating the agent
// location as we keep refining which range it corresponds to.
let (resolve_old_text, mut old_range) =
Self::resolve_old_text(snapshot.text.clone(), edit_events, cx);
while let Ok(old_range) = old_range.recv().await {
@@ -269,16 +270,29 @@ impl EditAgent {
}
}

let (edit_events_, resolved_old_text) = resolve_old_text.await?;
let (edit_events_, mut resolved_old_text) = resolve_old_text.await?;
edit_events = edit_events_;

// If we can't resolve the old text, restart the loop waiting for a
// new edit (or for the stream to end).
let Some(resolved_old_text) = resolved_old_text else {
output_events
.unbounded_send(EditAgentOutputEvent::UnresolvedEditRange)
.ok();
continue;
let resolved_old_text = match resolved_old_text.len() {
1 => resolved_old_text.pop().unwrap(),
0 => {
output_events
.unbounded_send(EditAgentOutputEvent::UnresolvedEditRange)
.ok();
continue;
}
_ => {
let ranges = resolved_old_text
.into_iter()
.map(|text| text.range)
.collect();
output_events
.unbounded_send(EditAgentOutputEvent::AmbiguousEditRange(ranges))
.ok();
continue;
}
};

// Compute edits in the background and apply them as they become
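
The new `match` above encodes a three-way policy on the number of fuzzy matches for `<old_text>`. A compact standalone rendering of the same decision (hypothetical `Outcome` type; the real code sends events on a channel instead of returning):

use std::ops::Range;

#[derive(Debug)]
enum Outcome {
    Apply(Range<usize>),          // exactly one match: safe to edit
    Unresolved,                   // zero matches: old_text was hallucinated
    Ambiguous(Vec<Range<usize>>), // several matches: ask for longer old_text
}

fn classify(mut matches: Vec<Range<usize>>) -> Outcome {
    match matches.len() {
        1 => Outcome::Apply(matches.pop().unwrap()),
        0 => Outcome::Unresolved,
        _ => Outcome::Ambiguous(matches),
    }
}
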
@@ -405,7 +419,7 @@ impl EditAgent {
mut edit_events: T,
cx: &mut AsyncApp,
) -> (
Task<Result<(T, Option<ResolvedOldText>)>>,
Task<Result<(T, Vec<ResolvedOldText>)>>,
async_watch::Receiver<Option<Range<usize>>>,
)
where

@@ -425,21 +439,29 @@ impl EditAgent {
}
}

let old_range = matcher.finish();
old_range_tx.send(old_range.clone())?;
if let Some(old_range) = old_range {
let line_indent =
LineIndent::from_iter(matcher.query_lines().first().unwrap().chars());
Ok((
edit_events,
Some(ResolvedOldText {
range: old_range,
indent: line_indent,
}),
))
let matches = matcher.finish();

let old_range = if matches.len() == 1 {
matches.first()
} else {
Ok((edit_events, None))
}
// No matches or multiple ambiguous matches
None
};
old_range_tx.send(old_range.cloned())?;

let indent = LineIndent::from_iter(
matcher
.query_lines()
.first()
.unwrap_or(&String::new())
.chars(),
);
let resolved_old_texts = matches
.into_iter()
.map(|range| ResolvedOldText { range, indent })
.collect::<Vec<_>>();

Ok((edit_events, resolved_old_texts))
});

(task, old_range_rx)

@@ -726,9 +748,6 @@ mod tests {
);
cx.run_until_parked();

// !talk: This is a more traditional unit test.
// !talk: It's randomized, but still fundamentally deterministic.
// !talk: But still relevant when working with an LLM.
simulate_llm_output(
&agent,
indoc! {"

@@ -752,7 +771,6 @@ mod tests {
);
}

// !talk: Really interesting unit test - Again about purely algorithmic code but critical to performance on the task.
#[gpui::test(iterations = 100)]
async fn test_indentation(cx: &mut TestAppContext, mut rng: StdRng) {
let agent = init_test(cx).await;

@@ -1326,6 +1344,76 @@ mod tests {
EditAgent::new(model, project, action_log, Templates::new())
}

#[gpui::test(iterations = 10)]
async fn test_non_unique_text_error(cx: &mut TestAppContext, mut rng: StdRng) {
let agent = init_test(cx).await;
let original_text = indoc! {"
function foo() {
return 42;
}

function bar() {
return 42;
}

function baz() {
return 42;
}
"};
let buffer = cx.new(|cx| Buffer::local(original_text, cx));
let (apply, mut events) = agent.edit(
buffer.clone(),
String::new(),
&LanguageModelRequest::default(),
&mut cx.to_async(),
);
cx.run_until_parked();

// When <old_text> matches text in more than one place
simulate_llm_output(
&agent,
indoc! {"
<old_text>
return 42;
</old_text>
<new_text>
return 100;
</new_text>
"},
&mut rng,
cx,
);
apply.await.unwrap();

// Then the text should remain unchanged
let result_text = buffer.read_with(cx, |buffer, _| buffer.snapshot().text());
assert_eq!(
result_text,
indoc! {"
function foo() {
return 42;
}

function bar() {
return 42;
}

function baz() {
return 42;
}
"},
"Text should remain unchanged when there are multiple matches"
);

// And an AmbiguousEditRange event should be emitted
let events = drain_events(&mut events);
let ambiguous_ranges = vec![17..31, 52..66, 87..101];
assert!(
events.contains(&EditAgentOutputEvent::AmbiguousEditRange(ambiguous_ranges)),
"Should emit AmbiguousEditRange for non-unique text"
);
}

fn drain_events(
stream: &mut UnboundedReceiver<EditAgentOutputEvent>,
) -> Vec<EditAgentOutputEvent> {

@@ -75,8 +75,6 @@ impl EditParser {
chunk.pop();
}

// !talk: We're tolerant of mismatched tags because we couldn't get this to zero
// !talk: Seems like things are more likely on distribution if the model gets this right, but we don't really know.
self.metrics.tags += 1;
if &self.buffer[tag_range.clone()] != OLD_TEXT_END_TAG {
self.metrics.mismatched_tags += 1;

@@ -335,7 +333,6 @@ mod tests {
);
}

// !talk: This is the traditional randomized test on the parser covering the last N%.
#[gpui::test(iterations = 1000)]
fn test_mismatched_tags(mut rng: StdRng) {
let mut parser = EditParser::new();

@@ -160,7 +160,6 @@ fn eval_delete_run_git_blame() {
);
}

// !talk: Go here after zoomed out eval.
#[test]
#[cfg_attr(not(feature = "eval"), ignore)]
fn eval_translate_doc_comments() {

@@ -177,7 +176,7 @@ fn eval_translate_doc_comments() {
let input_file_content = include_str!("evals/fixtures/translate_doc_comments/before.rs");
let edit_description = "Translate all doc comments to Italian";
eval(
100,
200,
1.,
EvalInput::from_conversation(
vec![

@@ -1350,11 +1349,9 @@ fn eval(iterations: usize, expected_pass_ratio: f32, mut eval: EvalInput) {
);
}

// !talk: Here's a blanket assertion we added to the eval tracking the presence of mismatched tags
// !talk: It's run on every eval because it's a cross cutting concern.
let mismatched_tag_ratio =
cumulative_parser_metrics.mismatched_tags as f32 / cumulative_parser_metrics.tags as f32;
if mismatched_tag_ratio > 0.05 {
if mismatched_tag_ratio > 0.10 {
for eval_output in eval_outputs {
println!("{}", eval_output);
}

@@ -11,7 +11,7 @@ pub struct StreamingFuzzyMatcher {
snapshot: TextBufferSnapshot,
query_lines: Vec<String>,
incomplete_line: String,
best_match: Option<Range<usize>>,
best_matches: Vec<Range<usize>>,
matrix: SearchMatrix,
}

@@ -22,7 +22,7 @@ impl StreamingFuzzyMatcher {
snapshot,
query_lines: Vec::new(),
incomplete_line: String::new(),
best_match: None,
best_matches: Vec::new(),
matrix: SearchMatrix::new(buffer_line_count + 1),
}
}

@@ -55,31 +55,41 @@ impl StreamingFuzzyMatcher {

self.incomplete_line.replace_range(..last_pos + 1, "");

self.best_match = self.resolve_location_fuzzy();
}
self.best_matches = self.resolve_location_fuzzy();

self.best_match.clone()
if let Some(first_match) = self.best_matches.first() {
Some(first_match.clone())
} else {
None
}
} else {
if let Some(first_match) = self.best_matches.first() {
Some(first_match.clone())
} else {
None
}
}
}

/// Finish processing and return the final best match.
/// Finish processing and return the final best match(es).
///
/// This processes any remaining incomplete line before returning the final
/// match result.
pub fn finish(&mut self) -> Option<Range<usize>> {
pub fn finish(&mut self) -> Vec<Range<usize>> {
// Process any remaining incomplete line
if !self.incomplete_line.is_empty() {
self.query_lines.push(self.incomplete_line.clone());
self.best_match = self.resolve_location_fuzzy();
self.incomplete_line.clear();
self.best_matches = self.resolve_location_fuzzy();
}

self.best_match.clone()
self.best_matches.clone()
}

fn resolve_location_fuzzy(&mut self) -> Option<Range<usize>> {
fn resolve_location_fuzzy(&mut self) -> Vec<Range<usize>> {
let new_query_line_count = self.query_lines.len();
let old_query_line_count = self.matrix.rows.saturating_sub(1);
if new_query_line_count == old_query_line_count {
return None;
return Vec::new();
}

self.matrix.resize_rows(new_query_line_count + 1);

@@ -132,53 +142,61 @@ impl StreamingFuzzyMatcher {
}
}

// Traceback to find the best match
// Find all matches with the best cost
let buffer_line_count = self.snapshot.max_point().row as usize + 1;
let mut buffer_row_end = buffer_line_count as u32;
let mut best_cost = u32::MAX;
let mut matches_with_best_cost = Vec::new();

for col in 1..=buffer_line_count {
let cost = self.matrix.get(new_query_line_count, col).cost;
if cost < best_cost {
best_cost = cost;
buffer_row_end = col as u32;
matches_with_best_cost.clear();
matches_with_best_cost.push(col as u32);
} else if cost == best_cost {
matches_with_best_cost.push(col as u32);
}
}

let mut matched_lines = 0;
let mut query_row = new_query_line_count;
let mut buffer_row_start = buffer_row_end;
while query_row > 0 && buffer_row_start > 0 {
let current = self.matrix.get(query_row, buffer_row_start as usize);
match current.direction {
SearchDirection::Diagonal => {
query_row -= 1;
buffer_row_start -= 1;
matched_lines += 1;
}
SearchDirection::Up => {
query_row -= 1;
}
SearchDirection::Left => {
buffer_row_start -= 1;
// Find ranges for the matches
let mut valid_matches = Vec::new();
for &buffer_row_end in &matches_with_best_cost {
let mut matched_lines = 0;
let mut query_row = new_query_line_count;
let mut buffer_row_start = buffer_row_end;
while query_row > 0 && buffer_row_start > 0 {
let current = self.matrix.get(query_row, buffer_row_start as usize);
match current.direction {
SearchDirection::Diagonal => {
query_row -= 1;
buffer_row_start -= 1;
matched_lines += 1;
}
SearchDirection::Up => {
query_row -= 1;
}
SearchDirection::Left => {
buffer_row_start -= 1;
}
}
}

let matched_buffer_row_count = buffer_row_end - buffer_row_start;
let matched_ratio = matched_lines as f32
/ (matched_buffer_row_count as f32).max(new_query_line_count as f32);
if matched_ratio >= 0.8 {
let buffer_start_ix = self
.snapshot
.point_to_offset(Point::new(buffer_row_start, 0));
let buffer_end_ix = self.snapshot.point_to_offset(Point::new(
buffer_row_end - 1,
self.snapshot.line_len(buffer_row_end - 1),
));
valid_matches.push((buffer_row_start, buffer_start_ix..buffer_end_ix));
}
}

let matched_buffer_row_count = buffer_row_end - buffer_row_start;
let matched_ratio = matched_lines as f32
/ (matched_buffer_row_count as f32).max(new_query_line_count as f32);
if matched_ratio >= 0.8 {
let buffer_start_ix = self
.snapshot
.point_to_offset(Point::new(buffer_row_start, 0));
let buffer_end_ix = self.snapshot.point_to_offset(Point::new(
buffer_row_end - 1,
self.snapshot.line_len(buffer_row_end - 1),
));
Some(buffer_start_ix..buffer_end_ix)
} else {
None
}
valid_matches.into_iter().map(|(_, range)| range).collect()
}
}

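The rewritten traceback above keeps every end column that ties for the best dynamic-programming cost, instead of only the first, and then filters each candidate by the 0.8 matched-line ratio. A small sketch of just the tie-collection step (plain slice of final-row costs instead of the `SearchMatrix`):

// Collect all candidate end columns whose cost ties for the minimum,
// mirroring the matches_with_best_cost loop above.
fn best_cost_columns(final_row_costs: &[u32]) -> Vec<usize> {
    let mut best = u32::MAX;
    let mut cols = Vec::new();
    for (col, &cost) in final_row_costs.iter().enumerate() {
        if cost < best {
            best = cost;
            cols.clear();
            cols.push(col);
        } else if cost == best {
            cols.push(col);
        }
    }
    cols
}
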
@@ -638,28 +656,35 @@ mod tests {
matcher.push(chunk);
}

let result = matcher.finish();
let actual_ranges = matcher.finish();

// If no expected ranges, we expect no match
if expected_ranges.is_empty() {
assert_eq!(
result, None,
assert!(
actual_ranges.is_empty(),
"Expected no match for query: {:?}, but found: {:?}",
query, result
query,
actual_ranges
);
} else {
let mut actual_ranges = Vec::new();
if let Some(range) = result {
actual_ranges.push(range);
}

let text_with_actual_range = generate_marked_text(&text, &actual_ranges, false);
pretty_assertions::assert_eq!(
text_with_actual_range,
text_with_expected_range,
"Query: {:?}, Chunks: {:?}",
indoc! {"
Query: {:?}
Chunks: {:?}
Expected marked text: {}
Actual marked text: {}
Expected ranges: {:?}
Actual ranges: {:?}"
},
query,
chunks
chunks,
text_with_expected_range,
text_with_actual_range,
expected_ranges,
actual_ranges
);
}
}

@@ -687,8 +712,11 @@ mod tests {

fn finish(mut finder: StreamingFuzzyMatcher) -> Option<String> {
let snapshot = finder.snapshot.clone();
finder
.finish()
.map(|range| snapshot.text_for_range(range).collect::<String>())
let matches = finder.finish();
if let Some(range) = matches.first() {
Some(snapshot.text_for_range(range.clone()).collect::<String>())
} else {
None
}
}
}

@@ -2,6 +2,7 @@ use crate::{
|
||||
Templates,
|
||||
edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent},
|
||||
schema::json_schema_for,
|
||||
ui::{COLLAPSED_LINES, ToolOutputPreview},
|
||||
};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{
|
||||
@@ -13,7 +14,7 @@ use editor::{Editor, EditorMode, MinimapVisibility, MultiBuffer, PathKey};
|
||||
use futures::StreamExt;
|
||||
use gpui::{
|
||||
Animation, AnimationExt, AnyWindowHandle, App, AppContext, AsyncApp, Entity, Task,
|
||||
TextStyleRefinement, WeakEntity, pulsating_between,
|
||||
TextStyleRefinement, WeakEntity, pulsating_between, px,
|
||||
};
|
||||
use indoc::formatdoc;
|
||||
use language::{
|
||||
@@ -128,6 +129,10 @@ impl Tool for EditFileTool {
|
||||
false
|
||||
}
|
||||
|
||||
fn may_perform_edits(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("edit_file_tool/description.md").to_string()
|
||||
}
|
||||
@@ -234,6 +239,7 @@ impl Tool for EditFileTool {
};

let mut hallucinated_old_text = false;
let mut ambiguous_ranges = Vec::new();
while let Some(event) = events.next().await {
    match event {
        EditAgentOutputEvent::Edited => {
@@ -242,6 +248,7 @@ impl Tool for EditFileTool {
            }
        }
        EditAgentOutputEvent::UnresolvedEditRange => hallucinated_old_text = true,
        EditAgentOutputEvent::AmbiguousEditRange(ranges) => ambiguous_ranges = ranges,
        EditAgentOutputEvent::ResolvingEditRange(range) => {
            if let Some(card) = card_clone.as_ref() {
                card.update(cx, |card, cx| card.reveal_range(range, cx))?;
@@ -324,6 +331,17 @@ impl Tool for EditFileTool {
        I can perform the requested edits.
    "}
);
anyhow::ensure!(
    ambiguous_ranges.is_empty(),
    // TODO: Include ambiguous_ranges, converted to line numbers.
    // This would work best if we add `line_hint` parameter
    // to edit_file_tool
    formatdoc! {"
        <old_text> matches more than one position in the file. Read the
        relevant sections of {input_path} again and extend <old_text> so
        that I can perform the requested edits.
    "}
);
Ok(ToolResultOutput {
    content: ToolResultContent::Text("No edits were made.".into()),
    output: serde_json::to_value(output).ok(),
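A reduced model of the event dispatch added above, with the enum narrowed to the variants that appear in this hunk (the real `EditAgentOutputEvent` lives in edit_agent and carries buffer anchors rather than plain usize ranges):

use std::ops::Range;

enum EditAgentOutputEvent {
    Edited,
    UnresolvedEditRange,
    AmbiguousEditRange(Vec<Range<usize>>),
    ResolvingEditRange(Range<usize>),
}

// Collect the two failure signals the tool later turns into ensure! errors.
fn handle(events: Vec<EditAgentOutputEvent>) -> (bool, Vec<Range<usize>>) {
    let mut hallucinated_old_text = false;
    let mut ambiguous_ranges = Vec::new();
    for event in events {
        match event {
            EditAgentOutputEvent::Edited => { /* refresh the preview card */ }
            EditAgentOutputEvent::UnresolvedEditRange => hallucinated_old_text = true,
            EditAgentOutputEvent::AmbiguousEditRange(ranges) => ambiguous_ranges = ranges,
            EditAgentOutputEvent::ResolvingEditRange(_range) => { /* reveal in card */ }
        }
    }
    (hallucinated_old_text, ambiguous_ranges)
}

fn main() {
    let (hallucinated, ambiguous) = handle(vec![
        EditAgentOutputEvent::Edited,
        EditAgentOutputEvent::AmbiguousEditRange(vec![3..7, 20..24]),
    ]);
    assert!(!hallucinated);
    assert_eq!(ambiguous.len(), 2);
}
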
@@ -884,30 +902,8 @@ impl ToolCard for EditFileToolCard {
    (element.into_any_element(), line_height)
});

let (full_height_icon, full_height_tooltip_label) = if self.full_height_expanded {
    (IconName::ChevronUp, "Collapse Code Block")
} else {
    (IconName::ChevronDown, "Expand Code Block")
};

let gradient_overlay =
    div()
        .absolute()
        .bottom_0()
        .left_0()
        .w_full()
        .h_2_5()
        .bg(gpui::linear_gradient(
            0.,
            gpui::linear_color_stop(cx.theme().colors().editor_background, 0.),
            gpui::linear_color_stop(cx.theme().colors().editor_background.opacity(0.), 1.),
        ));

let border_color = cx.theme().colors().border.opacity(0.6);

const DEFAULT_COLLAPSED_LINES: u32 = 10;
let is_collapsible = self.total_lines.unwrap_or(0) > DEFAULT_COLLAPSED_LINES;

let waiting_for_diff = {
    let styles = [
        ("w_4_5", (0.1, 0.85), 2000),
@@ -992,48 +988,34 @@ impl ToolCard for EditFileToolCard {
    card.child(waiting_for_diff)
})
.when(self.preview_expanded && !self.is_loading(), |card| {
    let editor_view = v_flex()
        .relative()
        .h_full()
        .when(!self.full_height_expanded, |editor_container| {
            editor_container.max_h(px(COLLAPSED_LINES as f32 * editor_line_height.0))
        })
        .overflow_hidden()
        .border_t_1()
        .border_color(border_color)
        .bg(cx.theme().colors().editor_background)
        .child(editor);

    card.child(
        v_flex()
            .relative()
            .h_full()
            .when(!self.full_height_expanded, |editor_container| {
                editor_container
                    .max_h(DEFAULT_COLLAPSED_LINES as f32 * editor_line_height)
            })
            .overflow_hidden()
            .border_t_1()
            .border_color(border_color)
            .bg(cx.theme().colors().editor_background)
            .child(editor)
            .when(
                !self.full_height_expanded && is_collapsible,
                |editor_container| editor_container.child(gradient_overlay),
            ),
        ToolOutputPreview::new(editor_view.into_any_element(), self.editor.entity_id())
            .with_total_lines(self.total_lines.unwrap_or(0) as usize)
            .toggle_state(self.full_height_expanded)
            .with_collapsed_fade()
            .on_toggle({
                let this = cx.entity().downgrade();
                move |is_expanded, _window, cx| {
                    if let Some(this) = this.upgrade() {
                        this.update(cx, |this, _cx| {
                            this.full_height_expanded = is_expanded;
                        });
                    }
                }
            }),
    )
    .when(is_collapsible, |card| {
        card.child(
            h_flex()
                .id(("expand-button", self.editor.entity_id()))
                .flex_none()
                .cursor_pointer()
                .h_5()
                .justify_center()
                .border_t_1()
                .rounded_b_md()
                .border_color(border_color)
                .bg(cx.theme().colors().editor_background)
                .hover(|style| style.bg(cx.theme().colors().element_hover.opacity(0.1)))
                .child(
                    Icon::new(full_height_icon)
                        .size(IconSize::Small)
                        .color(Color::Muted),
                )
                .tooltip(Tooltip::text(full_height_tooltip_label))
                .on_click(cx.listener(move |this, _event, _window, _cx| {
                    this.full_height_expanded = !this.full_height_expanded;
                })),
        )
    })
})
}
}

@@ -118,7 +118,11 @@ impl Tool for FetchTool {
}

fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    true
    false
}

fn may_perform_edits(&self) -> bool {
    false
}

fn description(&self) -> String {

@@ -59,6 +59,10 @@ impl Tool for FindPathTool {
    false
}

fn may_perform_edits(&self) -> bool {
    false
}

fn description(&self) -> String {
    include_str!("./find_path_tool/description.md").into()
}

@@ -6,11 +6,12 @@ use gpui::{AnyWindowHandle, App, Entity, Task};
use language::{OffsetRangeExt, ParseStatus, Point};
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
use project::{
    Project,
    Project, WorktreeSettings,
    search::{SearchQuery, SearchResult},
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::{cmp, fmt::Write, sync::Arc};
use ui::IconName;
use util::RangeExt;
@@ -60,6 +61,10 @@ impl Tool for GrepTool {
    false
}

fn may_perform_edits(&self) -> bool {
    false
}

fn description(&self) -> String {
    include_str!("./grep_tool/description.md").into()
}
@@ -126,6 +131,23 @@ impl Tool for GrepTool {
    }
};

// Exclude global file_scan_exclusions and private_files settings
let exclude_matcher = {
    let global_settings = WorktreeSettings::get_global(cx);
    let exclude_patterns = global_settings
        .file_scan_exclusions
        .sources()
        .iter()
        .chain(global_settings.private_files.sources().iter());

    match PathMatcher::new(exclude_patterns) {
        Ok(matcher) => matcher,
        Err(error) => {
            return Task::ready(Err(anyhow!("invalid exclude pattern: {error}"))).into();
        }
    }
};

let query = match SearchQuery::regex(
    &input.regex,
    false,
@@ -133,7 +155,7 @@ impl Tool for GrepTool {
    false,
    false,
    include_matcher,
    PathMatcher::default(), // For now, keep it simple and don't enable an exclude pattern.
    exclude_matcher,
    true, // Always match file include pattern against *full project paths* that start with a project root.
    None,
) {
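The matcher built above unions two settings (`file_scan_exclusions` and `private_files`) into a single exclusion set before the search runs. A standalone sketch of the same idea, using the `globset` crate as a stand-in for Zed's `PathMatcher` (the `.sources()` iterators in the real code yield glob strings like the ones below):

use globset::{Glob, GlobSet, GlobSetBuilder};

fn build_exclude_matcher(
    file_scan_exclusions: &[&str],
    private_files: &[&str],
) -> Result<GlobSet, globset::Error> {
    let mut builder = GlobSetBuilder::new();
    // Chain both settings, exactly as the tool chains the two `.sources()` iterators.
    for pattern in file_scan_exclusions.iter().chain(private_files) {
        builder.add(Glob::new(pattern)?);
    }
    builder.build()
}

fn main() -> Result<(), globset::Error> {
    let matcher = build_exclude_matcher(&["**/.secretdir"], &["**/*.privatekey"])?;
    assert!(matcher.is_match(".secretdir"));
    assert!(matcher.is_match("subdir/special.privatekey"));
    assert!(!matcher.is_match("src/main.rs"));
    Ok(())
}
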
@@ -156,12 +178,24 @@ impl Tool for GrepTool {
    continue;
}

let (Some(path), mut parse_status) = buffer.read_with(cx, |buffer, cx| {
let Ok((Some(path), mut parse_status)) = buffer.read_with(cx, |buffer, cx| {
    (buffer.file().map(|file| file.full_path(cx)), buffer.parse_status())
})? else {
}) else {
    continue;
};

// Check if this file should be excluded based on its worktree settings
if let Ok(Some(project_path)) = project.read_with(cx, |project, cx| {
    project.find_project_path(&path, cx)
}) {
    if cx.update(|cx| {
        let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx);
        worktree_settings.is_path_excluded(&project_path.path)
            || worktree_settings.is_path_private(&project_path.path)
    }).unwrap_or(false) {
        continue;
    }
}

while *parse_status.borrow() != ParseStatus::Idle {
    parse_status.changed().await?;
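Beyond the global exclusion set baked into the query, each hit is re-checked against the settings of the worktree it actually lives in; that is what the `find_project_path` block above does. A reduced sketch of that filtering step (types simplified to plain Rust; the naive suffix check below stands in for real glob matching):

use std::path::Path;

// Stand-in for the per-worktree settings consulted in the loop above.
struct WorktreeSettings {
    excluded: Vec<String>,
    private: Vec<String>,
}

impl WorktreeSettings {
    fn is_path_excluded(&self, path: &Path) -> bool {
        self.excluded.iter().any(|suffix| path.ends_with(suffix))
    }
    fn is_path_private(&self, path: &Path) -> bool {
        self.private.iter().any(|suffix| path.ends_with(suffix))
    }
}

/// Skip a search result when its worktree marks the file excluded or private.
fn should_skip(settings: &WorktreeSettings, path: &Path) -> bool {
    settings.is_path_excluded(path) || settings.is_path_private(path)
}

fn main() {
    let settings = WorktreeSettings {
        excluded: vec!["fixture.sql".to_string()],
        private: vec!["secret.rs".to_string()],
    };
    assert!(should_skip(&settings, Path::new("src/secret.rs")));
    assert!(!should_skip(&settings, Path::new("src/main.rs")));
}
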
@@ -280,10 +314,11 @@
mod tests {
    use super::*;
    use assistant_tool::Tool;
    use gpui::{AppContext, TestAppContext};
    use gpui::{AppContext, TestAppContext, UpdateGlobal};
    use language::{Language, LanguageConfig, LanguageMatcher};
    use language_model::fake_provider::FakeLanguageModel;
    use project::{FakeFs, Project};
    use project::{FakeFs, Project, WorktreeSettings};
    use serde_json::json;
    use settings::SettingsStore;
    use unindent::Unindent;
    use util::path;
@@ -295,7 +330,7 @@ mod tests {

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/root",
        path!("/root"),
        serde_json::json!({
            "src": {
                "main.rs": "fn main() {\n    println!(\"Hello, world!\");\n}",
@@ -383,7 +418,7 @@ mod tests {

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/root",
        path!("/root"),
        serde_json::json!({
            "case_test.txt": "This file has UPPERCASE and lowercase text.\nUPPERCASE patterns should match only with case_sensitive: true",
        }),
@@ -464,7 +499,7 @@ mod tests {

    // Create test file with syntax structures
    fs.insert_tree(
        "/root",
        path!("/root"),
        serde_json::json!({
            "test_syntax.rs": r#"
                fn top_level_function() {
@@ -785,4 +820,488 @@ mod tests {
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
}

#[gpui::test]
async fn test_grep_security_boundaries(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/"),
        json!({
            "project_root": {
                "allowed_file.rs": "fn main() { println!(\"This file is in the project\"); }",
                ".mysecrets": "SECRET_KEY=abc123\nfn secret() { /* private */ }",
                ".secretdir": {
                    "config": "fn special_configuration() { /* excluded */ }"
                },
                ".mymetadata": "fn custom_metadata() { /* excluded */ }",
                "subdir": {
                    "normal_file.rs": "fn normal_file_content() { /* Normal */ }",
                    "special.privatekey": "fn private_key_content() { /* private */ }",
                    "data.mysensitive": "fn sensitive_data() { /* private */ }"
                }
            },
            "outside_project": {
                "sensitive_file.rs": "fn outside_function() { /* This file is outside the project */ }"
            }
        }),
    )
    .await;

    cx.update(|cx| {
        use gpui::UpdateGlobal;
        use project::WorktreeSettings;
        use settings::SettingsStore;
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |settings| {
                settings.file_scan_exclusions = Some(vec![
                    "**/.secretdir".to_string(),
                    "**/.mymetadata".to_string(),
                ]);
                settings.private_files = Some(vec![
                    "**/.mysecrets".to_string(),
                    "**/*.privatekey".to_string(),
                    "**/*.mysensitive".to_string(),
                ]);
            });
        });
    });

    let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let model = Arc::new(FakeLanguageModel::default());

    // Searching for files outside the project worktree should return no results
    let result = cx
        .update(|cx| {
            let input = json!({
                "regex": "outside_function"
            });
            Arc::new(GrepTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    let results = result.unwrap();
    let paths = extract_paths_from_results(&results.content.as_str().unwrap());
    assert!(
        paths.is_empty(),
        "grep_tool should not find files outside the project worktree"
    );

    // Searching within the project should succeed
    let result = cx
        .update(|cx| {
            let input = json!({
                "regex": "main"
            });
            Arc::new(GrepTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    let results = result.unwrap();
    let paths = extract_paths_from_results(&results.content.as_str().unwrap());
    assert!(
        paths.iter().any(|p| p.contains("allowed_file.rs")),
        "grep_tool should be able to search files inside worktrees"
    );

    // Searching files that match file_scan_exclusions should return no results
    let result = cx
        .update(|cx| {
            let input = json!({
                "regex": "special_configuration"
            });
            Arc::new(GrepTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    let results = result.unwrap();
    let paths = extract_paths_from_results(&results.content.as_str().unwrap());
    assert!(
        paths.is_empty(),
        "grep_tool should not search files in .secretdir (file_scan_exclusions)"
    );

    let result = cx
        .update(|cx| {
            let input = json!({
                "regex": "custom_metadata"
            });
            Arc::new(GrepTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    let results = result.unwrap();
    let paths = extract_paths_from_results(&results.content.as_str().unwrap());
    assert!(
        paths.is_empty(),
        "grep_tool should not search .mymetadata files (file_scan_exclusions)"
    );

    // Searching private files should return no results
    let result = cx
        .update(|cx| {
            let input = json!({
                "regex": "SECRET_KEY"
            });
            Arc::new(GrepTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    let results = result.unwrap();
    let paths = extract_paths_from_results(&results.content.as_str().unwrap());
    assert!(
        paths.is_empty(),
        "grep_tool should not search .mysecrets (private_files)"
    );

    let result = cx
        .update(|cx| {
            let input = json!({
                "regex": "private_key_content"
            });
            Arc::new(GrepTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    let results = result.unwrap();
    let paths = extract_paths_from_results(&results.content.as_str().unwrap());
    assert!(
        paths.is_empty(),
        "grep_tool should not search .privatekey files (private_files)"
    );

    let result = cx
        .update(|cx| {
            let input = json!({
                "regex": "sensitive_data"
            });
            Arc::new(GrepTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    let results = result.unwrap();
    let paths = extract_paths_from_results(&results.content.as_str().unwrap());
    assert!(
        paths.is_empty(),
        "grep_tool should not search .mysensitive files (private_files)"
    );

    // Searching a normal file should still work, even with private_files configured
    let result = cx
        .update(|cx| {
            let input = json!({
                "regex": "normal_file_content"
            });
            Arc::new(GrepTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    let results = result.unwrap();
    let paths = extract_paths_from_results(&results.content.as_str().unwrap());
    assert!(
        paths.iter().any(|p| p.contains("normal_file.rs")),
        "Should be able to search normal files"
    );

    // Path traversal attempts with .. in include_pattern should not escape project
    let result = cx
        .update(|cx| {
            let input = json!({
                "regex": "outside_function",
                "include_pattern": "../outside_project/**/*.rs"
            });
            Arc::new(GrepTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    let results = result.unwrap();
    let paths = extract_paths_from_results(&results.content.as_str().unwrap());
    assert!(
        paths.is_empty(),
        "grep_tool should not allow escaping project boundaries with relative paths"
    );
}

#[gpui::test]
async fn test_grep_with_multiple_worktree_settings(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create first worktree with its own private files
    fs.insert_tree(
        path!("/worktree1"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "file_scan_exclusions": ["**/fixture.*"],
                    "private_files": ["**/secret.rs"]
                }"#
            },
            "src": {
                "main.rs": "fn main() { let secret_key = \"hidden\"; }",
                "secret.rs": "const API_KEY: &str = \"secret_value\";",
                "utils.rs": "pub fn get_config() -> String { \"config\".to_string() }"
            },
            "tests": {
                "test.rs": "fn test_secret() { assert!(true); }",
                "fixture.sql": "SELECT * FROM secret_table;"
            }
        }),
    )
    .await;

    // Create second worktree with different private files
    fs.insert_tree(
        path!("/worktree2"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "file_scan_exclusions": ["**/internal.*"],
                    "private_files": ["**/private.js", "**/data.json"]
                }"#
            },
            "lib": {
                "public.js": "export function getSecret() { return 'public'; }",
                "private.js": "const SECRET_KEY = \"private_value\";",
                "data.json": "{\"secret_data\": \"hidden\"}"
            },
            "docs": {
                "README.md": "# Documentation with secret info",
                "internal.md": "Internal secret documentation"
            }
        }),
    )
    .await;

    // Set global settings
    cx.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |settings| {
                settings.file_scan_exclusions =
                    Some(vec!["**/.git".to_string(), "**/node_modules".to_string()]);
                settings.private_files = Some(vec!["**/.env".to_string()]);
            });
        });
    });

    let project = Project::test(
        fs.clone(),
        [path!("/worktree1").as_ref(), path!("/worktree2").as_ref()],
        cx,
    )
    .await;

    // Wait for worktrees to be fully scanned
    cx.executor().run_until_parked();

    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let model = Arc::new(FakeLanguageModel::default());

    // Search for "secret" - should exclude files based on worktree-specific settings
    let result = cx
        .update(|cx| {
            let input = json!({
                "regex": "secret",
                "case_sensitive": false
            });
            Arc::new(GrepTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await
        .unwrap();

    let content = result.content.as_str().unwrap();
    let paths = extract_paths_from_results(&content);

    // Should find matches in non-private files
    assert!(
        paths.iter().any(|p| p.contains("main.rs")),
        "Should find 'secret' in worktree1/src/main.rs"
    );
    assert!(
        paths.iter().any(|p| p.contains("test.rs")),
        "Should find 'secret' in worktree1/tests/test.rs"
    );
    assert!(
        paths.iter().any(|p| p.contains("public.js")),
        "Should find 'secret' in worktree2/lib/public.js"
    );
    assert!(
        paths.iter().any(|p| p.contains("README.md")),
        "Should find 'secret' in worktree2/docs/README.md"
    );

    // Should NOT find matches in private/excluded files based on worktree settings
    assert!(
        !paths.iter().any(|p| p.contains("secret.rs")),
        "Should not search in worktree1/src/secret.rs (local private_files)"
    );
    assert!(
        !paths.iter().any(|p| p.contains("fixture.sql")),
        "Should not search in worktree1/tests/fixture.sql (local file_scan_exclusions)"
    );
    assert!(
        !paths.iter().any(|p| p.contains("private.js")),
        "Should not search in worktree2/lib/private.js (local private_files)"
    );
    assert!(
        !paths.iter().any(|p| p.contains("data.json")),
        "Should not search in worktree2/lib/data.json (local private_files)"
    );
    assert!(
        !paths.iter().any(|p| p.contains("internal.md")),
        "Should not search in worktree2/docs/internal.md (local file_scan_exclusions)"
    );

    // Test with `include_pattern` specific to one worktree
    let result = cx
        .update(|cx| {
            let input = json!({
                "regex": "secret",
                "include_pattern": "worktree1/**/*.rs"
            });
            Arc::new(GrepTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await
        .unwrap();

    let content = result.content.as_str().unwrap();
    let paths = extract_paths_from_results(&content);

    // Should only find matches in worktree1 *.rs files (excluding private ones)
    assert!(
        paths.iter().any(|p| p.contains("main.rs")),
        "Should find match in worktree1/src/main.rs"
    );
    assert!(
        paths.iter().any(|p| p.contains("test.rs")),
        "Should find match in worktree1/tests/test.rs"
    );
    assert!(
        !paths.iter().any(|p| p.contains("secret.rs")),
        "Should not find match in excluded worktree1/src/secret.rs"
    );
    assert!(
        paths.iter().all(|p| !p.contains("worktree2")),
        "Should not find any matches in worktree2"
    );
}

// Helper function to extract file paths from grep results
fn extract_paths_from_results(results: &str) -> Vec<String> {
    results
        .lines()
        .filter(|line| line.starts_with("## Matches in "))
        .map(|line| {
            line.strip_prefix("## Matches in ")
                .unwrap()
                .trim()
                .to_string()
        })
        .collect()
}
}

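The helper parses the tool's output by its "## Matches in " section headers. A quick, self-contained check of that contract (the sample output below is shaped like the real tool's, not taken from it):

fn extract_paths_from_results(results: &str) -> Vec<String> {
    results
        .lines()
        .filter(|line| line.starts_with("## Matches in "))
        .map(|line| line.strip_prefix("## Matches in ").unwrap().trim().to_string())
        .collect()
}

fn main() {
    let sample = "Found 2 matches.\n\n## Matches in worktree1/src/main.rs\n...\n## Matches in worktree1/tests/test.rs\n...";
    assert_eq!(
        extract_paths_from_results(sample),
        vec!["worktree1/src/main.rs", "worktree1/tests/test.rs"]
    );
}
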
@@ -6,3 +6,4 @@ Searches the contents of files in the project with a regular expression
- Never use this tool to search for paths. Only search file contents with this tool.
- Use this tool when you need to find files containing specific patterns
- Results are paginated with 20 matches per page. Use the optional 'offset' parameter to request subsequent pages.
- DO NOT use HTML entities solely to escape characters in the tool parameters.

@@ -3,9 +3,10 @@ use anyhow::{Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolResult};
use gpui::{AnyWindowHandle, App, Entity, Task};
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
use project::Project;
use project::{Project, WorktreeSettings};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::{fmt::Write, path::Path, sync::Arc};
use ui::IconName;
use util::markdown::MarkdownInlineCode;
@@ -48,6 +49,10 @@ impl Tool for ListDirectoryTool {
    false
}

fn may_perform_edits(&self) -> bool {
    false
}

fn description(&self) -> String {
    include_str!("./list_directory_tool/description.md").into()
}
@@ -115,21 +120,80 @@ impl Tool for ListDirectoryTool {
else {
    return Task::ready(Err(anyhow!("Worktree not found"))).into();
};
let worktree = worktree.read(cx);

let Some(entry) = worktree.entry_for_path(&project_path.path) else {
// Check if the directory whose contents we're listing is itself excluded or private
let global_settings = WorktreeSettings::get_global(cx);
if global_settings.is_path_excluded(&project_path.path) {
    return Task::ready(Err(anyhow!(
        "Cannot list directory because its path matches the user's global `file_scan_exclusions` setting: {}",
        &input.path
    )))
    .into();
}

if global_settings.is_path_private(&project_path.path) {
    return Task::ready(Err(anyhow!(
        "Cannot list directory because its path matches the user's global `private_files` setting: {}",
        &input.path
    )))
    .into();
}

let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx);
if worktree_settings.is_path_excluded(&project_path.path) {
    return Task::ready(Err(anyhow!(
        "Cannot list directory because its path matches the user's worktree `file_scan_exclusions` setting: {}",
        &input.path
    )))
    .into();
}

if worktree_settings.is_path_private(&project_path.path) {
    return Task::ready(Err(anyhow!(
        "Cannot list directory because its path matches the user's worktree `private_files` setting: {}",
        &input.path
    )))
    .into();
}

let worktree_snapshot = worktree.read(cx).snapshot();
let worktree_root_name = worktree.read(cx).root_name().to_string();

let Some(entry) = worktree_snapshot.entry_for_path(&project_path.path) else {
    return Task::ready(Err(anyhow!("Path not found: {}", input.path))).into();
};

if !entry.is_dir() {
    return Task::ready(Err(anyhow!("{} is not a directory.", input.path))).into();
}
let worktree_snapshot = worktree.read(cx).snapshot();

let mut folders = Vec::new();
let mut files = Vec::new();

for entry in worktree.child_entries(&project_path.path) {
    let full_path = Path::new(worktree.root_name())
for entry in worktree_snapshot.child_entries(&project_path.path) {
    // Skip private and excluded files and directories
    if global_settings.is_path_private(&entry.path)
        || global_settings.is_path_excluded(&entry.path)
    {
        continue;
    }

    if project
        .read(cx)
        .find_project_path(&entry.path, cx)
        .map(|project_path| {
            let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx);

            worktree_settings.is_path_excluded(&project_path.path)
                || worktree_settings.is_path_private(&project_path.path)
        })
        .unwrap_or(false)
    {
        continue;
    }

    let full_path = Path::new(&worktree_root_name)
        .join(&entry.path)
        .display()
        .to_string();
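The listing path is guarded twice, once against global settings and once against the worktree's own, and the same pair of checks is then reapplied per child entry. A condensed sketch of that gate, with the settings objects reduced to plain closures (the messages below mimic the tool's error shape, they are not copied from it):

/// A path may be listed only if neither the global settings nor its
/// worktree's settings mark it excluded or private.
fn can_list(
    globally_blocked: impl Fn(&str) -> bool,
    worktree_blocked: impl Fn(&str) -> bool,
    path: &str,
) -> Result<(), String> {
    if globally_blocked(path) {
        return Err(format!("path matches a global exclusion: {path}"));
    }
    if worktree_blocked(path) {
        return Err(format!("path matches a worktree exclusion: {path}"));
    }
    Ok(())
}

fn main() {
    let blocked_globally = |p: &str| p.ends_with(".env");
    let blocked_locally = |p: &str| p.ends_with("secret.rs");
    assert!(can_list(&blocked_globally, &blocked_locally, "src/main.rs").is_ok());
    assert!(can_list(&blocked_globally, &blocked_locally, "src/secret.rs").is_err());
}
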
@@ -162,10 +226,10 @@ impl Tool for ListDirectoryTool {
mod tests {
    use super::*;
    use assistant_tool::Tool;
    use gpui::{AppContext, TestAppContext};
    use gpui::{AppContext, TestAppContext, UpdateGlobal};
    use indoc::indoc;
    use language_model::fake_provider::FakeLanguageModel;
    use project::{FakeFs, Project};
    use project::{FakeFs, Project, WorktreeSettings};
    use serde_json::json;
    use settings::SettingsStore;
    use util::path;
@@ -193,7 +257,7 @@ mod tests {

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/project",
        path!("/project"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
@@ -323,7 +387,7 @@ mod tests {

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/project",
        path!("/project"),
        json!({
            "empty_dir": {}
        }),
@@ -355,7 +419,7 @@ mod tests {

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/project",
        path!("/project"),
        json!({
            "file.txt": "content"
        }),
@@ -408,4 +472,394 @@ mod tests {
        .contains("is not a directory")
    );
}

#[gpui::test]
async fn test_list_directory_security(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            "normal_dir": {
                "file1.txt": "content",
                "file2.txt": "content"
            },
            ".mysecrets": "SECRET_KEY=abc123",
            ".secretdir": {
                "config": "special configuration",
                "secret.txt": "secret content"
            },
            ".mymetadata": "custom metadata",
            "visible_dir": {
                "normal.txt": "normal content",
                "special.privatekey": "private key content",
                "data.mysensitive": "sensitive data",
                ".hidden_subdir": {
                    "hidden_file.txt": "hidden content"
                }
            }
        }),
    )
    .await;

    // Configure settings explicitly
    cx.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |settings| {
                settings.file_scan_exclusions = Some(vec![
                    "**/.secretdir".to_string(),
                    "**/.mymetadata".to_string(),
                    "**/.hidden_subdir".to_string(),
                ]);
                settings.private_files = Some(vec![
                    "**/.mysecrets".to_string(),
                    "**/*.privatekey".to_string(),
                    "**/*.mysensitive".to_string(),
                ]);
            });
        });
    });

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let model = Arc::new(FakeLanguageModel::default());
    let tool = Arc::new(ListDirectoryTool);

    // Listing root directory should exclude private and excluded files
    let input = json!({
        "path": "project"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await
        .unwrap();

    let content = result.content.as_str().unwrap();

    // Should include normal directories
    assert!(content.contains("normal_dir"), "Should list normal_dir");
    assert!(content.contains("visible_dir"), "Should list visible_dir");

    // Should NOT include excluded or private files
    assert!(
        !content.contains(".secretdir"),
        "Should not list .secretdir (file_scan_exclusions)"
    );
    assert!(
        !content.contains(".mymetadata"),
        "Should not list .mymetadata (file_scan_exclusions)"
    );
    assert!(
        !content.contains(".mysecrets"),
        "Should not list .mysecrets (private_files)"
    );

    // Trying to list an excluded directory should fail
    let input = json!({
        "path": "project/.secretdir"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await;

    assert!(
        result.is_err(),
        "Should not be able to list excluded directory"
    );
    assert!(
        result
            .unwrap_err()
            .to_string()
            .contains("file_scan_exclusions"),
        "Error should mention file_scan_exclusions"
    );

    // Listing a directory should exclude private files within it
    let input = json!({
        "path": "project/visible_dir"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await
        .unwrap();

    let content = result.content.as_str().unwrap();

    // Should include normal files
    assert!(content.contains("normal.txt"), "Should list normal.txt");

    // Should NOT include private files
    assert!(
        !content.contains("privatekey"),
        "Should not list .privatekey files (private_files)"
    );
    assert!(
        !content.contains("mysensitive"),
        "Should not list .mysensitive files (private_files)"
    );

    // Should NOT include subdirectories that match exclusions
    assert!(
        !content.contains(".hidden_subdir"),
        "Should not list .hidden_subdir (file_scan_exclusions)"
    );
}

#[gpui::test]
async fn test_list_directory_with_multiple_worktree_settings(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create first worktree with its own private files
    fs.insert_tree(
        path!("/worktree1"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "file_scan_exclusions": ["**/fixture.*"],
                    "private_files": ["**/secret.rs", "**/config.toml"]
                }"#
            },
            "src": {
                "main.rs": "fn main() { println!(\"Hello from worktree1\"); }",
                "secret.rs": "const API_KEY: &str = \"secret_key_1\";",
                "config.toml": "[database]\nurl = \"postgres://localhost/db1\""
            },
            "tests": {
                "test.rs": "mod tests { fn test_it() {} }",
                "fixture.sql": "CREATE TABLE users (id INT, name VARCHAR(255));"
            }
        }),
    )
    .await;

    // Create second worktree with different private files
    fs.insert_tree(
        path!("/worktree2"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "file_scan_exclusions": ["**/internal.*"],
                    "private_files": ["**/private.js", "**/data.json"]
                }"#
            },
            "lib": {
                "public.js": "export function greet() { return 'Hello from worktree2'; }",
                "private.js": "const SECRET_TOKEN = \"private_token_2\";",
                "data.json": "{\"api_key\": \"json_secret_key\"}"
            },
            "docs": {
                "README.md": "# Public Documentation",
                "internal.md": "# Internal Secrets and Configuration"
            }
        }),
    )
    .await;

    // Set global settings
    cx.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |settings| {
                settings.file_scan_exclusions =
                    Some(vec!["**/.git".to_string(), "**/node_modules".to_string()]);
                settings.private_files = Some(vec!["**/.env".to_string()]);
            });
        });
    });

    let project = Project::test(
        fs.clone(),
        [path!("/worktree1").as_ref(), path!("/worktree2").as_ref()],
        cx,
    )
    .await;

    // Wait for worktrees to be fully scanned
    cx.executor().run_until_parked();

    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let model = Arc::new(FakeLanguageModel::default());
    let tool = Arc::new(ListDirectoryTool);

    // Test listing worktree1/src - should exclude secret.rs and config.toml based on local settings
    let input = json!({
        "path": "worktree1/src"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await
        .unwrap();

    let content = result.content.as_str().unwrap();
    assert!(content.contains("main.rs"), "Should list main.rs");
    assert!(
        !content.contains("secret.rs"),
        "Should not list secret.rs (local private_files)"
    );
    assert!(
        !content.contains("config.toml"),
        "Should not list config.toml (local private_files)"
    );

    // Test listing worktree1/tests - should exclude fixture.sql based on local settings
    let input = json!({
        "path": "worktree1/tests"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await
        .unwrap();

    let content = result.content.as_str().unwrap();
    assert!(content.contains("test.rs"), "Should list test.rs");
    assert!(
        !content.contains("fixture.sql"),
        "Should not list fixture.sql (local file_scan_exclusions)"
    );

    // Test listing worktree2/lib - should exclude private.js and data.json based on local settings
    let input = json!({
        "path": "worktree2/lib"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await
        .unwrap();

    let content = result.content.as_str().unwrap();
    assert!(content.contains("public.js"), "Should list public.js");
    assert!(
        !content.contains("private.js"),
        "Should not list private.js (local private_files)"
    );
    assert!(
        !content.contains("data.json"),
        "Should not list data.json (local private_files)"
    );

    // Test listing worktree2/docs - should exclude internal.md based on local settings
    let input = json!({
        "path": "worktree2/docs"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await
        .unwrap();

    let content = result.content.as_str().unwrap();
    assert!(content.contains("README.md"), "Should list README.md");
    assert!(
        !content.contains("internal.md"),
        "Should not list internal.md (local file_scan_exclusions)"
    );

    // Test trying to list an excluded directory directly
    let input = json!({
        "path": "worktree1/src/secret.rs"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await;

    // This should fail because we're trying to list a file, not a directory
    assert!(result.is_err(), "Should fail when trying to list a file");
}
}

@@ -46,6 +46,10 @@ impl Tool for MovePathTool {
    false
}

fn may_perform_edits(&self) -> bool {
    true
}

fn description(&self) -> String {
    include_str!("./move_path_tool/description.md").into()
}

@@ -37,6 +37,10 @@ impl Tool for NowTool {
    false
}

fn may_perform_edits(&self) -> bool {
    false
}

fn description(&self) -> String {
    "Returns the current datetime in RFC 3339 format. Only use this tool when the user specifically asks for it or the current task would benefit from knowing the current datetime.".into()
}

@@ -26,7 +26,9 @@ impl Tool for OpenTool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    true
}

fn may_perform_edits(&self) -> bool {
    false
}
fn description(&self) -> String {
    include_str!("./open_tool/description.md").to_string()
}

@@ -12,9 +12,10 @@ use language::{Anchor, Point};
use language_model::{
    LanguageModel, LanguageModelImage, LanguageModelRequest, LanguageModelToolSchemaFormat,
};
use project::{AgentLocation, Project};
use project::{AgentLocation, Project, WorktreeSettings};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::sync::Arc;
use ui::IconName;
use util::markdown::MarkdownInlineCode;
@@ -58,6 +59,10 @@ impl Tool for ReadFileTool {
    false
}

fn may_perform_edits(&self) -> bool {
    false
}

fn description(&self) -> String {
    include_str!("./read_file_tool/description.md").into()
}
@@ -103,12 +108,48 @@ impl Tool for ReadFileTool {
    return Task::ready(Err(anyhow!("Path {} not found in project", &input.path))).into();
};

// Error out if this path is either excluded or private in global settings
let global_settings = WorktreeSettings::get_global(cx);
if global_settings.is_path_excluded(&project_path.path) {
    return Task::ready(Err(anyhow!(
        "Cannot read file because its path matches the global `file_scan_exclusions` setting: {}",
        &input.path
    )))
    .into();
}

if global_settings.is_path_private(&project_path.path) {
    return Task::ready(Err(anyhow!(
        "Cannot read file because its path matches the global `private_files` setting: {}",
        &input.path
    )))
    .into();
}

// Error out if this path is either excluded or private in worktree settings
let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx);
if worktree_settings.is_path_excluded(&project_path.path) {
    return Task::ready(Err(anyhow!(
        "Cannot read file because its path matches the worktree `file_scan_exclusions` setting: {}",
        &input.path
    )))
    .into();
}

if worktree_settings.is_path_private(&project_path.path) {
    return Task::ready(Err(anyhow!(
        "Cannot read file because its path matches the worktree `private_files` setting: {}",
        &input.path
    )))
    .into();
}

let file_path = input.path.clone();

if image_store::is_image_file(&project, &project_path, cx) {
    if !model.supports_images() {
        return Task::ready(Err(anyhow!(
            "Attempted to read an image, but Zed doesn't currently sending images to {}.",
            "Attempted to read an image, but Zed doesn't currently support sending images to {}.",
            model.name().0
        )))
        .into();
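ReadFileTool applies the same four checks before touching the buffer, and the error text names which layer (global vs worktree) and which setting (`file_scan_exclusions` vs `private_files`) rejected the path, which the tests below assert on by substring. A compressed sketch of that error-selection logic (a model of the message shape, not the tool's actual code path):

enum Layer { Global, Worktree }
enum Setting { FileScanExclusions, PrivateFiles }

fn rejection(layer: Layer, setting: Setting, path: &str) -> String {
    let layer = match layer { Layer::Global => "global", Layer::Worktree => "worktree" };
    let setting = match setting {
        Setting::FileScanExclusions => "file_scan_exclusions",
        Setting::PrivateFiles => "private_files",
    };
    format!("Cannot read file because its path matches the {layer} `{setting}` setting: {path}")
}

fn main() {
    let msg = rejection(Layer::Worktree, Setting::PrivateFiles, "worktree1/src/secret.rs");
    assert!(msg.contains("worktree `private_files` setting"));
}
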
@@ -248,10 +289,10 @@ impl Tool for ReadFileTool {
#[cfg(test)]
mod test {
    use super::*;
    use gpui::{AppContext, TestAppContext};
    use gpui::{AppContext, TestAppContext, UpdateGlobal};
    use language::{Language, LanguageConfig, LanguageMatcher};
    use language_model::fake_provider::FakeLanguageModel;
    use project::{FakeFs, Project};
    use project::{FakeFs, Project, WorktreeSettings};
    use serde_json::json;
    use settings::SettingsStore;
    use util::path;
@@ -261,7 +302,7 @@ mod test {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/root", json!({})).await;
    fs.insert_tree(path!("/root"), json!({})).await;
    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let model = Arc::new(FakeLanguageModel::default());
@@ -295,7 +336,7 @@ mod test {

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        path!("/root"),
        json!({
            "small_file.txt": "This is a small file content"
        }),
@@ -334,7 +375,7 @@ mod test {

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        path!("/root"),
        json!({
            "large_file.rs": (0..1000).map(|i| format!("struct Test{} {{\n a: u32,\n b: usize,\n}}", i)).collect::<Vec<_>>().join("\n")
        }),
@@ -425,7 +466,7 @@ mod test {

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        path!("/root"),
        json!({
            "multiline.txt": "Line 1\nLine 2\nLine 3\nLine 4\nLine 5"
        }),
@@ -466,7 +507,7 @@ mod test {

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        path!("/root"),
        json!({
            "multiline.txt": "Line 1\nLine 2\nLine 3\nLine 4\nLine 5"
        }),
@@ -597,4 +638,544 @@ mod test {
    )
    .unwrap()
}

#[gpui::test]
async fn test_read_file_security(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/"),
        json!({
            "project_root": {
                "allowed_file.txt": "This file is in the project",
                ".mysecrets": "SECRET_KEY=abc123",
                ".secretdir": {
                    "config": "special configuration"
                },
                ".mymetadata": "custom metadata",
                "subdir": {
                    "normal_file.txt": "Normal file content",
                    "special.privatekey": "private key content",
                    "data.mysensitive": "sensitive data"
                }
            },
            "outside_project": {
                "sensitive_file.txt": "This file is outside the project"
            }
        }),
    )
    .await;

    cx.update(|cx| {
        use gpui::UpdateGlobal;
        use project::WorktreeSettings;
        use settings::SettingsStore;
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |settings| {
                settings.file_scan_exclusions = Some(vec![
                    "**/.secretdir".to_string(),
                    "**/.mymetadata".to_string(),
                ]);
                settings.private_files = Some(vec![
                    "**/.mysecrets".to_string(),
                    "**/*.privatekey".to_string(),
                    "**/*.mysensitive".to_string(),
                ]);
            });
        });
    });

    let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let model = Arc::new(FakeLanguageModel::default());

    // Reading a file outside the project worktree should fail
    let result = cx
        .update(|cx| {
            let input = json!({
                "path": "/outside_project/sensitive_file.txt"
            });
            Arc::new(ReadFileTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    assert!(
        result.is_err(),
        "read_file_tool should error when attempting to read an absolute path outside a worktree"
    );

    // Reading a file within the project should succeed
    let result = cx
        .update(|cx| {
            let input = json!({
                "path": "project_root/allowed_file.txt"
            });
            Arc::new(ReadFileTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    assert!(
        result.is_ok(),
        "read_file_tool should be able to read files inside worktrees"
    );

    // Reading files that match file_scan_exclusions should fail
    let result = cx
        .update(|cx| {
            let input = json!({
                "path": "project_root/.secretdir/config"
            });
            Arc::new(ReadFileTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    assert!(
        result.is_err(),
        "read_file_tool should error when attempting to read files in .secretdir (file_scan_exclusions)"
    );

    let result = cx
        .update(|cx| {
            let input = json!({
                "path": "project_root/.mymetadata"
            });
            Arc::new(ReadFileTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    assert!(
        result.is_err(),
        "read_file_tool should error when attempting to read .mymetadata files (file_scan_exclusions)"
    );

    // Reading private files should fail
    let result = cx
        .update(|cx| {
            let input = json!({
                "path": "project_root/.mysecrets"
            });
            Arc::new(ReadFileTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    assert!(
        result.is_err(),
        "read_file_tool should error when attempting to read .mysecrets (private_files)"
    );

    let result = cx
        .update(|cx| {
            let input = json!({
                "path": "project_root/subdir/special.privatekey"
            });
            Arc::new(ReadFileTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    assert!(
        result.is_err(),
        "read_file_tool should error when attempting to read .privatekey files (private_files)"
    );

    let result = cx
        .update(|cx| {
            let input = json!({
                "path": "project_root/subdir/data.mysensitive"
            });
            Arc::new(ReadFileTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    assert!(
        result.is_err(),
        "read_file_tool should error when attempting to read .mysensitive files (private_files)"
    );

    // Reading a normal file should still work, even with private_files configured
    let result = cx
        .update(|cx| {
            let input = json!({
                "path": "project_root/subdir/normal_file.txt"
            });
            Arc::new(ReadFileTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    assert!(result.is_ok(), "Should be able to read normal files");
    assert_eq!(
        result.unwrap().content.as_str().unwrap(),
        "Normal file content"
    );

    // Path traversal attempts with .. should fail
    let result = cx
        .update(|cx| {
            let input = json!({
                "path": "project_root/../outside_project/sensitive_file.txt"
            });
            Arc::new(ReadFileTool)
                .run(
                    input,
                    Arc::default(),
                    project.clone(),
                    action_log.clone(),
                    model.clone(),
                    None,
                    cx,
                )
                .output
        })
        .await;
    assert!(
        result.is_err(),
        "read_file_tool should error when attempting to read a relative path that resolves to outside a worktree"
    );
}

#[gpui::test]
async fn test_read_file_with_multiple_worktree_settings(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create first worktree with its own private_files setting
    fs.insert_tree(
        path!("/worktree1"),
        json!({
            "src": {
                "main.rs": "fn main() { println!(\"Hello from worktree1\"); }",
                "secret.rs": "const API_KEY: &str = \"secret_key_1\";",
                "config.toml": "[database]\nurl = \"postgres://localhost/db1\""
            },
            "tests": {
                "test.rs": "mod tests { fn test_it() {} }",
                "fixture.sql": "CREATE TABLE users (id INT, name VARCHAR(255));"
            },
            ".zed": {
                "settings.json": r#"{
                    "file_scan_exclusions": ["**/fixture.*"],
                    "private_files": ["**/secret.rs", "**/config.toml"]
                }"#
            }
        }),
    )
    .await;

    // Create second worktree with different private_files setting
    fs.insert_tree(
        path!("/worktree2"),
        json!({
            "lib": {
                "public.js": "export function greet() { return 'Hello from worktree2'; }",
                "private.js": "const SECRET_TOKEN = \"private_token_2\";",
                "data.json": "{\"api_key\": \"json_secret_key\"}"
            },
            "docs": {
                "README.md": "# Public Documentation",
                "internal.md": "# Internal Secrets and Configuration"
            },
            ".zed": {
                "settings.json": r#"{
                    "file_scan_exclusions": ["**/internal.*"],
                    "private_files": ["**/private.js", "**/data.json"]
                }"#
            }
        }),
    )
    .await;

    // Set global settings
    cx.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |settings| {
                settings.file_scan_exclusions =
                    Some(vec!["**/.git".to_string(), "**/node_modules".to_string()]);
                settings.private_files = Some(vec!["**/.env".to_string()]);
            });
        });
    });

    let project = Project::test(
        fs.clone(),
        [path!("/worktree1").as_ref(), path!("/worktree2").as_ref()],
        cx,
    )
    .await;

    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let model = Arc::new(FakeLanguageModel::default());
    let tool = Arc::new(ReadFileTool);

    // Test reading allowed files in worktree1
    let input = json!({
        "path": "worktree1/src/main.rs"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await
        .unwrap();

    assert_eq!(
        result.content.as_str().unwrap(),
        "fn main() { println!(\"Hello from worktree1\"); }"
    );

    // Test reading private file in worktree1 should fail
    let input = json!({
        "path": "worktree1/src/secret.rs"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await;

    assert!(result.is_err());
    assert!(
        result
            .unwrap_err()
            .to_string()
            .contains("worktree `private_files` setting"),
        "Error should mention worktree private_files setting"
    );

    // Test reading excluded file in worktree1 should fail
    let input = json!({
        "path": "worktree1/tests/fixture.sql"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await;

    assert!(result.is_err());
    assert!(
        result
            .unwrap_err()
            .to_string()
            .contains("worktree `file_scan_exclusions` setting"),
        "Error should mention worktree file_scan_exclusions setting"
    );

    // Test reading allowed files in worktree2
    let input = json!({
        "path": "worktree2/lib/public.js"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await
        .unwrap();

    assert_eq!(
        result.content.as_str().unwrap(),
        "export function greet() { return 'Hello from worktree2'; }"
    );

    // Test reading private file in worktree2 should fail
    let input = json!({
        "path": "worktree2/lib/private.js"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await;

    assert!(result.is_err());
    assert!(
        result
            .unwrap_err()
            .to_string()
            .contains("worktree `private_files` setting"),
        "Error should mention worktree private_files setting"
    );

    // Test reading excluded file in worktree2 should fail
    let input = json!({
        "path": "worktree2/docs/internal.md"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await;

    assert!(result.is_err());
    assert!(
        result
            .unwrap_err()
            .to_string()
            .contains("worktree `file_scan_exclusions` setting"),
        "Error should mention worktree file_scan_exclusions setting"
    );

    // Test that files allowed in one worktree but not in another are handled correctly
    // (e.g., config.toml is private in worktree1 but doesn't exist in worktree2)
    let input = json!({
        "path": "worktree1/src/config.toml"
    });

    let result = cx
        .update(|cx| {
            tool.clone().run(
                input,
                Arc::default(),
                project.clone(),
                action_log.clone(),
                model.clone(),
                None,
                cx,
            )
        })
        .output
        .await;

    assert!(result.is_err());
    assert!(
        result
            .unwrap_err()
            .to_string()
            .contains("worktree `private_files` setting"),
        "Config.toml should be blocked by worktree1's private_files setting"
    );
}
}

@@ -1,4 +1,7 @@
use crate::schema::json_schema_for;
use crate::{
    schema::json_schema_for,
    ui::{COLLAPSED_LINES, ToolOutputPreview},
};
use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolCard, ToolResult, ToolUseStatus};
use futures::{FutureExt as _, future::Shared};
@@ -25,7 +28,7 @@ use terminal_view::TerminalView;
use theme::ThemeSettings;
use ui::{Disclosure, Tooltip, prelude::*};
use util::{
    get_system_shell, markdown::MarkdownInlineCode, size::format_file_size,
    ResultExt, get_system_shell, markdown::MarkdownInlineCode, size::format_file_size,
    time::duration_alt_display,
};
use workspace::Workspace;
@@ -77,6 +80,10 @@ impl Tool for TerminalTool {
        true
    }

    fn may_perform_edits(&self) -> bool {
        false
    }

    fn description(&self) -> String {
        include_str!("./terminal_tool/description.md").to_string()
    }
@@ -254,22 +261,24 @@ impl Tool for TerminalTool {

        let terminal_view = window.update(cx, |_, window, cx| {
            cx.new(|cx| {
                TerminalView::new(
                let mut view = TerminalView::new(
                    terminal.clone(),
                    workspace.downgrade(),
                    None,
                    project.downgrade(),
                    true,
                    window,
                    cx,
                )
                );
                view.set_embedded_mode(None, cx);
                view
            })
        })?;

        let _ = card.update(cx, |card, _| {
        card.update(cx, |card, _| {
            card.terminal = Some(terminal_view.clone());
            card.start_instant = Instant::now();
        });
        })
        .log_err();

        let exit_status = terminal
            .update(cx, |terminal, cx| terminal.wait_for_completed_task(cx))?
@@ -285,7 +294,7 @@ impl Tool for TerminalTool {
            exit_status.map(portable_pty::ExitStatus::from),
        );

        let _ = card.update(cx, |card, _| {
        card.update(cx, |card, _| {
            card.command_finished = true;
            card.exit_status = exit_status;
            card.was_content_truncated = processed_content.len() < previous_len;
@@ -293,7 +302,8 @@ impl Tool for TerminalTool {
            card.content_line_count = content_line_count;
            card.finished_with_empty_output = finished_with_empty_output;
            card.elapsed_time = Some(card.start_instant.elapsed());
        });
        })
        .log_err();

        Ok(processed_content.into())
    }
@@ -473,7 +483,6 @@ impl ToolCard for TerminalToolCard {
        let time_elapsed = self
            .elapsed_time
            .unwrap_or_else(|| self.start_instant.elapsed());
        let should_hide_terminal = tool_failed || self.finished_with_empty_output;

        let header_bg = cx
            .theme()
@@ -574,7 +583,7 @@ impl ToolCard for TerminalToolCard {
                ),
            )
        })
        .when(!should_hide_terminal, |header| {
        .when(!self.finished_with_empty_output, |header| {
            header.child(
                Disclosure::new(
                    ("terminal-tool-disclosure", self.entity_id),
@@ -618,19 +627,43 @@ impl ToolCard for TerminalToolCard {
                        ),
                    ),
            )
            .when(self.preview_expanded && !should_hide_terminal, |this| {
                this.child(
                    div()
                        .pt_2()
                        .min_h_72()
                        .border_t_1()
                        .border_color(border_color)
                        .bg(cx.theme().colors().editor_background)
                        .rounded_b_md()
                        .text_ui_sm(cx)
                        .child(terminal.clone()),
                )
            })
            .when(
                self.preview_expanded && !self.finished_with_empty_output,
                |this| {
                    this.child(
                        div()
                            .pt_2()
                            .border_t_1()
                            .border_color(border_color)
                            .bg(cx.theme().colors().editor_background)
                            .rounded_b_md()
                            .text_ui_sm(cx)
                            .child(
                                ToolOutputPreview::new(
                                    terminal.clone().into_any_element(),
                                    terminal.entity_id(),
                                )
                                .with_total_lines(self.content_line_count)
                                .toggle_state(!terminal.read(cx).is_content_limited(window))
                                .on_toggle({
                                    let terminal = terminal.clone();
                                    move |is_expanded, _, cx| {
                                        terminal.update(cx, |terminal, cx| {
                                            terminal.set_embedded_mode(
                                                if is_expanded {
                                                    None
                                                } else {
                                                    Some(COLLAPSED_LINES)
                                                },
                                                cx,
                                            );
                                        });
                                    }
                                }),
                            ),
                    )
                },
            )
            .into_any()
    }
}

@@ -28,6 +28,10 @@ impl Tool for ThinkingTool {
        false
    }

    fn may_perform_edits(&self) -> bool {
        false
    }

    fn description(&self) -> String {
        include_str!("./thinking_tool/description.md").to_string()
    }

@@ -1,3 +1,5 @@
mod tool_call_card_header;
mod tool_output_preview;

pub use tool_call_card_header::*;
pub use tool_output_preview::*;

115
crates/assistant_tools/src/ui/tool_output_preview.rs
Normal file
@@ -0,0 +1,115 @@
use gpui::{AnyElement, EntityId, prelude::*};
use ui::{Tooltip, prelude::*};

#[derive(IntoElement)]
pub struct ToolOutputPreview<F>
where
    F: Fn(bool, &mut Window, &mut App) + 'static,
{
    content: AnyElement,
    entity_id: EntityId,
    full_height: bool,
    total_lines: usize,
    collapsed_fade: bool,
    on_toggle: Option<F>,
}

pub const COLLAPSED_LINES: usize = 10;

impl<F> ToolOutputPreview<F>
where
    F: Fn(bool, &mut Window, &mut App) + 'static,
{
    pub fn new(content: AnyElement, entity_id: EntityId) -> Self {
        Self {
            content,
            entity_id,
            full_height: true,
            total_lines: 0,
            collapsed_fade: false,
            on_toggle: None,
        }
    }

    pub fn with_total_lines(mut self, total_lines: usize) -> Self {
        self.total_lines = total_lines;
        self
    }

    pub fn toggle_state(mut self, full_height: bool) -> Self {
        self.full_height = full_height;
        self
    }

    pub fn with_collapsed_fade(mut self) -> Self {
        self.collapsed_fade = true;
        self
    }

    pub fn on_toggle(mut self, listener: F) -> Self {
        self.on_toggle = Some(listener);
        self
    }
}

impl<F> RenderOnce for ToolOutputPreview<F>
where
    F: Fn(bool, &mut Window, &mut App) + 'static,
{
    fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
        if self.total_lines <= COLLAPSED_LINES {
            return self.content;
        }
        let border_color = cx.theme().colors().border.opacity(0.6);

        let (icon, tooltip_label) = if self.full_height {
            (IconName::ChevronUp, "Collapse")
        } else {
            (IconName::ChevronDown, "Expand")
        };

        let gradient_overlay =
            if self.collapsed_fade && !self.full_height {
                Some(div().absolute().bottom_5().left_0().w_full().h_2_5().bg(
                    gpui::linear_gradient(
                        0.,
                        gpui::linear_color_stop(cx.theme().colors().editor_background, 0.),
                        gpui::linear_color_stop(
                            cx.theme().colors().editor_background.opacity(0.),
                            1.,
                        ),
                    ),
                ))
            } else {
                None
            };

        v_flex()
            .relative()
            .child(self.content)
            .children(gradient_overlay)
            .child(
                h_flex()
                    .id(("expand-button", self.entity_id))
                    .flex_none()
                    .cursor_pointer()
                    .h_5()
                    .justify_center()
                    .border_t_1()
                    .rounded_b_md()
                    .border_color(border_color)
                    .bg(cx.theme().colors().editor_background)
                    .hover(|style| style.bg(cx.theme().colors().element_hover.opacity(0.1)))
                    .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted))
                    .tooltip(Tooltip::text(tooltip_label))
                    .when_some(self.on_toggle, |this, on_toggle| {
                        this.on_click({
                            move |_, window, cx| {
                                on_toggle(!self.full_height, window, cx);
                            }
                        })
                    }),
            )
            .into_any()
    }
}
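For orientation, a minimal sketch of how the `ToolOutputPreview` builder introduced above is meant to be driven. The builder calls mirror the terminal tool call site earlier in this diff; the `render_collapsible` wrapper and its parameters are hypothetical names, not part of the change:

```rust
use gpui::{AnyElement, EntityId};

// Hypothetical call site (sketch): wrap an already-built element in the
// collapsible preview. At or below COLLAPSED_LINES (10) total lines,
// render() returns the content unchanged and no expand button is drawn.
fn render_collapsible(content: AnyElement, id: EntityId, total_lines: usize) -> AnyElement {
    ToolOutputPreview::new(content, id)
        .with_total_lines(total_lines)
        // Start collapsed; the chevron icon and tooltip flip with this state.
        .toggle_state(false)
        // Optional fade at the bottom edge while collapsed.
        .with_collapsed_fade()
        .on_toggle(|is_expanded, _window, _cx| {
            // React to the expand/collapse click, e.g. resize the wrapped
            // view the way the terminal tool sets its embedded mode.
            let _ = is_expanded;
        })
        .into_any_element()
}
```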
@@ -36,6 +36,10 @@ impl Tool for WebSearchTool {
        false
    }

    fn may_perform_edits(&self) -> bool {
        false
    }

    fn description(&self) -> String {
        "Search the web for information using your query. Use this when you need real-time information, facts, or data that might not be in your training. Results will include snippets and links from relevant web pages.".into()
    }

@@ -71,20 +71,22 @@ pub enum Model {
    // DeepSeek
    DeepSeekR1,
    // Meta models
    MetaLlama3_8BInstruct,
    MetaLlama3_70BInstruct,
    MetaLlama31_8BInstruct,
    MetaLlama31_70BInstruct,
    MetaLlama31_405BInstruct,
    MetaLlama32_1BInstruct,
    MetaLlama32_3BInstruct,
    MetaLlama32_11BMultiModal,
    MetaLlama32_90BMultiModal,
    MetaLlama33_70BInstruct,
    MetaLlama38BInstructV1,
    MetaLlama370BInstructV1,
    MetaLlama318BInstructV1_128k,
    MetaLlama318BInstructV1,
    MetaLlama3170BInstructV1_128k,
    MetaLlama3170BInstructV1,
    MetaLlama31405BInstructV1,
    MetaLlama321BInstructV1,
    MetaLlama323BInstructV1,
    MetaLlama3211BInstructV1,
    MetaLlama3290BInstructV1,
    MetaLlama3370BInstructV1,
    #[allow(non_camel_case_types)]
    MetaLlama4Scout_17BInstruct,
    MetaLlama4Scout17BInstructV1,
    #[allow(non_camel_case_types)]
    MetaLlama4Maverick_17BInstruct,
    MetaLlama4Maverick17BInstructV1,
    // Mistral models
    MistralMistral7BInstructV0,
    MistralMixtral8x7BInstructV0,
@@ -129,6 +131,64 @@ impl Model {
    }

    pub fn id(&self) -> &str {
        match self {
            Model::ClaudeSonnet4 => "claude-4-sonnet",
            Model::ClaudeSonnet4Thinking => "claude-4-sonnet-thinking",
            Model::ClaudeOpus4 => "claude-4-opus",
            Model::ClaudeOpus4Thinking => "claude-4-opus-thinking",
            Model::Claude3_5SonnetV2 => "claude-3-5-sonnet-v2",
            Model::Claude3_5Sonnet => "claude-3-5-sonnet",
            Model::Claude3Opus => "claude-3-opus",
            Model::Claude3Sonnet => "claude-3-sonnet",
            Model::Claude3Haiku => "claude-3-haiku",
            Model::Claude3_5Haiku => "claude-3-5-haiku",
            Model::Claude3_7Sonnet => "claude-3-7-sonnet",
            Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking",
            Model::AmazonNovaLite => "amazon-nova-lite",
            Model::AmazonNovaMicro => "amazon-nova-micro",
            Model::AmazonNovaPro => "amazon-nova-pro",
            Model::AmazonNovaPremier => "amazon-nova-premier",
            Model::DeepSeekR1 => "deepseek-r1",
            Model::AI21J2GrandeInstruct => "ai21-j2-grande-instruct",
            Model::AI21J2JumboInstruct => "ai21-j2-jumbo-instruct",
            Model::AI21J2Mid => "ai21-j2-mid",
            Model::AI21J2MidV1 => "ai21-j2-mid-v1",
            Model::AI21J2Ultra => "ai21-j2-ultra",
            Model::AI21J2UltraV1_8k => "ai21-j2-ultra-v1-8k",
            Model::AI21J2UltraV1 => "ai21-j2-ultra-v1",
            Model::AI21JambaInstructV1 => "ai21-jamba-instruct-v1",
            Model::AI21Jamba15LargeV1 => "ai21-jamba-1-5-large-v1",
            Model::AI21Jamba15MiniV1 => "ai21-jamba-1-5-mini-v1",
            Model::CohereCommandTextV14_4k => "cohere-command-text-v14-4k",
            Model::CohereCommandRV1 => "cohere-command-r-v1",
            Model::CohereCommandRPlusV1 => "cohere-command-r-plus-v1",
            Model::CohereCommandLightTextV14_4k => "cohere-command-light-text-v14-4k",
            Model::MetaLlama38BInstructV1 => "meta-llama3-8b-instruct-v1",
            Model::MetaLlama370BInstructV1 => "meta-llama3-70b-instruct-v1",
            Model::MetaLlama318BInstructV1_128k => "meta-llama3-1-8b-instruct-v1-128k",
            Model::MetaLlama318BInstructV1 => "meta-llama3-1-8b-instruct-v1",
            Model::MetaLlama3170BInstructV1_128k => "meta-llama3-1-70b-instruct-v1-128k",
            Model::MetaLlama3170BInstructV1 => "meta-llama3-1-70b-instruct-v1",
            Model::MetaLlama31405BInstructV1 => "meta-llama3-1-405b-instruct-v1",
            Model::MetaLlama321BInstructV1 => "meta-llama3-2-1b-instruct-v1",
            Model::MetaLlama323BInstructV1 => "meta-llama3-2-3b-instruct-v1",
            Model::MetaLlama3211BInstructV1 => "meta-llama3-2-11b-instruct-v1",
            Model::MetaLlama3290BInstructV1 => "meta-llama3-2-90b-instruct-v1",
            Model::MetaLlama3370BInstructV1 => "meta-llama3-3-70b-instruct-v1",
            Model::MetaLlama4Scout17BInstructV1 => "meta-llama4-scout-17b-instruct-v1",
            Model::MetaLlama4Maverick17BInstructV1 => "meta-llama4-maverick-17b-instruct-v1",
            Model::MistralMistral7BInstructV0 => "mistral-7b-instruct-v0",
            Model::MistralMixtral8x7BInstructV0 => "mistral-mixtral-8x7b-instruct-v0",
            Model::MistralMistralLarge2402V1 => "mistral-large-2402-v1",
            Model::MistralMistralSmall2402V1 => "mistral-small-2402-v1",
            Model::MistralPixtralLarge2502V1 => "mistral-pixtral-large-2502-v1",
            Model::PalmyraWriterX4 => "palmyra-writer-x4",
            Model::PalmyraWriterX5 => "palmyra-writer-x5",
            Self::Custom { name, .. } => name,
        }
    }

    pub fn request_id(&self) -> &str {
        match self {
            Model::ClaudeSonnet4 | Model::ClaudeSonnet4Thinking => {
                "anthropic.claude-sonnet-4-20250514-v1:0"
@@ -164,18 +224,20 @@ impl Model {
            Model::CohereCommandRV1 => "cohere.command-r-v1:0",
            Model::CohereCommandRPlusV1 => "cohere.command-r-plus-v1:0",
            Model::CohereCommandLightTextV14_4k => "cohere.command-light-text-v14:7:4k",
            Model::MetaLlama3_8BInstruct => "meta.llama3-8b-instruct-v1:0",
            Model::MetaLlama3_70BInstruct => "meta.llama3-70b-instruct-v1:0",
            Model::MetaLlama31_8BInstruct => "meta.llama3-1-8b-instruct-v1:0",
            Model::MetaLlama31_70BInstruct => "meta.llama3-1-70b-instruct-v1:0",
            Model::MetaLlama31_405BInstruct => "meta.llama3-1-405b-instruct-v1:0",
            Model::MetaLlama32_11BMultiModal => "meta.llama3-2-11b-instruct-v1:0",
            Model::MetaLlama32_90BMultiModal => "meta.llama3-2-90b-instruct-v1:0",
            Model::MetaLlama32_1BInstruct => "meta.llama3-2-1b-instruct-v1:0",
            Model::MetaLlama32_3BInstruct => "meta.llama3-2-3b-instruct-v1:0",
            Model::MetaLlama33_70BInstruct => "meta.llama3-3-70b-instruct-v1:0",
            Model::MetaLlama4Scout_17BInstruct => "meta.llama4-scout-17b-instruct-v1:0",
            Model::MetaLlama4Maverick_17BInstruct => "meta.llama4-maverick-17b-instruct-v1:0",
            Model::MetaLlama38BInstructV1 => "meta.llama3-8b-instruct-v1:0",
            Model::MetaLlama370BInstructV1 => "meta.llama3-70b-instruct-v1:0",
            Model::MetaLlama318BInstructV1_128k => "meta.llama3-1-8b-instruct-v1:0",
            Model::MetaLlama318BInstructV1 => "meta.llama3-1-8b-instruct-v1:0",
            Model::MetaLlama3170BInstructV1_128k => "meta.llama3-1-70b-instruct-v1:0",
            Model::MetaLlama3170BInstructV1 => "meta.llama3-1-70b-instruct-v1:0",
            Model::MetaLlama31405BInstructV1 => "meta.llama3-1-405b-instruct-v1:0",
            Model::MetaLlama3211BInstructV1 => "meta.llama3-2-11b-instruct-v1:0",
            Model::MetaLlama3290BInstructV1 => "meta.llama3-2-90b-instruct-v1:0",
            Model::MetaLlama321BInstructV1 => "meta.llama3-2-1b-instruct-v1:0",
            Model::MetaLlama323BInstructV1 => "meta.llama3-2-3b-instruct-v1:0",
            Model::MetaLlama3370BInstructV1 => "meta.llama3-3-70b-instruct-v1:0",
            Model::MetaLlama4Scout17BInstructV1 => "meta.llama4-scout-17b-instruct-v1:0",
            Model::MetaLlama4Maverick17BInstructV1 => "meta.llama4-maverick-17b-instruct-v1:0",
            Model::MistralMistral7BInstructV0 => "mistral.mistral-7b-instruct-v0:2",
            Model::MistralMixtral8x7BInstructV0 => "mistral.mixtral-8x7b-instruct-v0:1",
            Model::MistralMistralLarge2402V1 => "mistral.mistral-large-2402-v1:0",
@@ -220,18 +282,20 @@ impl Model {
            Self::CohereCommandRV1 => "Cohere Command R V1",
            Self::CohereCommandRPlusV1 => "Cohere Command R Plus V1",
            Self::CohereCommandLightTextV14_4k => "Cohere Command Light Text V14 4K",
            Self::MetaLlama3_8BInstruct => "Meta Llama 3 8B Instruct",
            Self::MetaLlama3_70BInstruct => "Meta Llama 3 70B Instruct",
            Self::MetaLlama31_8BInstruct => "Meta Llama 3.1 8B Instruct",
            Self::MetaLlama31_70BInstruct => "Meta Llama 3.1 70B Instruct",
            Self::MetaLlama31_405BInstruct => "Meta Llama 3.1 405B Instruct",
            Self::MetaLlama32_11BMultiModal => "Meta Llama 3.2 11B Vision Instruct",
            Self::MetaLlama32_90BMultiModal => "Meta Llama 3.2 90B Vision Instruct",
            Self::MetaLlama32_1BInstruct => "Meta Llama 3.2 1B Instruct",
            Self::MetaLlama32_3BInstruct => "Meta Llama 3.2 3B Instruct",
            Self::MetaLlama33_70BInstruct => "Meta Llama 3.3 70B Instruct",
            Self::MetaLlama4Scout_17BInstruct => "Meta Llama 4 Scout 17B Instruct",
            Self::MetaLlama4Maverick_17BInstruct => "Meta Llama 4 Maverick 17B Instruct",
            Self::MetaLlama38BInstructV1 => "Meta Llama 3 8B Instruct",
            Self::MetaLlama370BInstructV1 => "Meta Llama 3 70B Instruct",
            Self::MetaLlama318BInstructV1_128k => "Meta Llama 3.1 8B Instruct 128K",
            Self::MetaLlama318BInstructV1 => "Meta Llama 3.1 8B Instruct",
            Self::MetaLlama3170BInstructV1_128k => "Meta Llama 3.1 70B Instruct 128K",
            Self::MetaLlama3170BInstructV1 => "Meta Llama 3.1 70B Instruct",
            Self::MetaLlama31405BInstructV1 => "Meta Llama 3.1 405B Instruct",
            Self::MetaLlama3211BInstructV1 => "Meta Llama 3.2 11B Instruct",
            Self::MetaLlama3290BInstructV1 => "Meta Llama 3.2 90B Instruct",
            Self::MetaLlama321BInstructV1 => "Meta Llama 3.2 1B Instruct",
            Self::MetaLlama323BInstructV1 => "Meta Llama 3.2 3B Instruct",
            Self::MetaLlama3370BInstructV1 => "Meta Llama 3.3 70B Instruct",
            Self::MetaLlama4Scout17BInstructV1 => "Meta Llama 4 Scout 17B Instruct",
            Self::MetaLlama4Maverick17BInstructV1 => "Meta Llama 4 Maverick 17B Instruct",
            Self::MistralMistral7BInstructV0 => "Mistral 7B Instruct V0",
            Self::MistralMixtral8x7BInstructV0 => "Mistral Mixtral 8x7B Instruct V0",
            Self::MistralMistralLarge2402V1 => "Mistral Large 2402 V1",
@@ -253,7 +317,9 @@ impl Model {
            | Self::Claude3_5Haiku
            | Self::Claude3_7Sonnet
            | Self::ClaudeSonnet4
            | Self::ClaudeOpus4 => 200_000,
            | Self::ClaudeOpus4
            | Self::ClaudeSonnet4Thinking
            | Self::ClaudeOpus4Thinking => 200_000,
            Self::AmazonNovaPremier => 1_000_000,
            Self::PalmyraWriterX5 => 1_000_000,
            Self::PalmyraWriterX4 => 128_000,
@@ -362,11 +428,11 @@ impl Model {
            anyhow::bail!("Unsupported Region {region}");
        };

        let model_id = self.id();
        let model_id = self.request_id();

        match (self, region_group) {
            // Custom models can't have CRI IDs
            (Model::Custom { .. }, _) => Ok(self.id().into()),
            (Model::Custom { .. }, _) => Ok(self.request_id().into()),

            // Models with US Gov only
            (Model::Claude3_5Sonnet, "us-gov") | (Model::Claude3Haiku, "us-gov") => {
@@ -390,16 +456,18 @@ impl Model {
                | Model::Claude3Opus
                | Model::Claude3Sonnet
                | Model::DeepSeekR1
                | Model::MetaLlama31_405BInstruct
                | Model::MetaLlama31_70BInstruct
                | Model::MetaLlama31_8BInstruct
                | Model::MetaLlama32_11BMultiModal
                | Model::MetaLlama32_1BInstruct
                | Model::MetaLlama32_3BInstruct
                | Model::MetaLlama32_90BMultiModal
                | Model::MetaLlama33_70BInstruct
                | Model::MetaLlama4Maverick_17BInstruct
                | Model::MetaLlama4Scout_17BInstruct
                | Model::MetaLlama31405BInstructV1
                | Model::MetaLlama3170BInstructV1_128k
                | Model::MetaLlama3170BInstructV1
                | Model::MetaLlama318BInstructV1_128k
                | Model::MetaLlama318BInstructV1
                | Model::MetaLlama3211BInstructV1
                | Model::MetaLlama321BInstructV1
                | Model::MetaLlama323BInstructV1
                | Model::MetaLlama3290BInstructV1
                | Model::MetaLlama3370BInstructV1
                | Model::MetaLlama4Maverick17BInstructV1
                | Model::MetaLlama4Scout17BInstructV1
                | Model::MistralPixtralLarge2502V1
                | Model::PalmyraWriterX4
                | Model::PalmyraWriterX5,
@@ -413,8 +481,8 @@ impl Model {
                | Model::Claude3_7SonnetThinking
                | Model::Claude3Haiku
                | Model::Claude3Sonnet
                | Model::MetaLlama32_1BInstruct
                | Model::MetaLlama32_3BInstruct
                | Model::MetaLlama321BInstructV1
                | Model::MetaLlama323BInstructV1
                | Model::MistralPixtralLarge2502V1,
                "eu",
            ) => Ok(format!("{}.{}", region_group, model_id)),
@@ -429,7 +497,7 @@ impl Model {
            ) => Ok(format!("{}.{}", region_group, model_id)),

            // Any other combination is not supported
            _ => Ok(self.id().into()),
            _ => Ok(self.request_id().into()),
        }
    }
}
@@ -506,15 +574,15 @@ mod tests {
    fn test_meta_models_inference_ids() -> anyhow::Result<()> {
        // Test Meta models
        assert_eq!(
            Model::MetaLlama3_70BInstruct.cross_region_inference_id("us-east-1")?,
            Model::MetaLlama370BInstructV1.cross_region_inference_id("us-east-1")?,
            "meta.llama3-70b-instruct-v1:0"
        );
        assert_eq!(
            Model::MetaLlama31_70BInstruct.cross_region_inference_id("us-east-1")?,
            Model::MetaLlama3170BInstructV1.cross_region_inference_id("us-east-1")?,
            "us.meta.llama3-1-70b-instruct-v1:0"
        );
        assert_eq!(
            Model::MetaLlama32_1BInstruct.cross_region_inference_id("eu-west-1")?,
            Model::MetaLlama321BInstructV1.cross_region_inference_id("eu-west-1")?,
            "eu.meta.llama3-2-1b-instruct-v1:0"
        );
        Ok(())
@@ -584,4 +652,39 @@ mod tests {

        Ok(())
    }

    #[test]
    fn test_friendly_id_vs_request_id() {
        // Test that id() returns friendly identifiers
        assert_eq!(Model::Claude3_5SonnetV2.id(), "claude-3-5-sonnet-v2");
        assert_eq!(Model::AmazonNovaLite.id(), "amazon-nova-lite");
        assert_eq!(Model::DeepSeekR1.id(), "deepseek-r1");
        assert_eq!(
            Model::MetaLlama38BInstructV1.id(),
            "meta-llama3-8b-instruct-v1"
        );

        // Test that request_id() returns actual backend model IDs
        assert_eq!(
            Model::Claude3_5SonnetV2.request_id(),
            "anthropic.claude-3-5-sonnet-20241022-v2:0"
        );
        assert_eq!(Model::AmazonNovaLite.request_id(), "amazon.nova-lite-v1:0");
        assert_eq!(Model::DeepSeekR1.request_id(), "deepseek.r1-v1:0");
        assert_eq!(
            Model::MetaLlama38BInstructV1.request_id(),
            "meta.llama3-8b-instruct-v1:0"
        );

        // Test thinking models have different friendly IDs but same request IDs
        assert_eq!(Model::ClaudeSonnet4.id(), "claude-4-sonnet");
        assert_eq!(
            Model::ClaudeSonnet4Thinking.id(),
            "claude-4-sonnet-thinking"
        );
        assert_eq!(
            Model::ClaudeSonnet4.request_id(),
            Model::ClaudeSonnet4Thinking.request_id()
        );
    }
}

@@ -56,6 +56,7 @@ pub struct Channel {
    pub name: SharedString,
    pub visibility: proto::ChannelVisibility,
    pub parent_path: Vec<ChannelId>,
    pub channel_order: i32,
}

#[derive(Default, Debug)]
@@ -614,7 +615,24 @@ impl ChannelStore {
                    to: to.0,
                })
                .await?;
            Ok(())
        })
    }

    pub fn reorder_channel(
        &mut self,
        channel_id: ChannelId,
        direction: proto::reorder_channel::Direction,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let client = self.client.clone();
        cx.spawn(async move |_, _| {
            client
                .request(proto::ReorderChannel {
                    channel_id: channel_id.0,
                    direction: direction.into(),
                })
                .await?;
            Ok(())
        })
    }
@@ -1027,6 +1045,18 @@ impl ChannelStore {
        });
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn reset(&mut self) {
        self.channel_invitations.clear();
        self.channel_index.clear();
        self.channel_participants.clear();
        self.outgoing_invites.clear();
        self.opened_buffers.clear();
        self.opened_chats.clear();
        self.disconnect_channel_buffers_task = None;
        self.channel_states.clear();
    }

    pub(crate) fn update_channels(
        &mut self,
        payload: proto::UpdateChannels,
@@ -1051,6 +1081,7 @@ impl ChannelStore {
                    visibility: channel.visibility(),
                    name: channel.name.into(),
                    parent_path: channel.parent_path.into_iter().map(ChannelId).collect(),
                    channel_order: channel.channel_order,
                }),
            ),
        }

@@ -61,11 +61,13 @@ impl ChannelPathsInsertGuard<'_> {

            ret = existing_channel.visibility != channel_proto.visibility()
                || existing_channel.name != channel_proto.name
                || existing_channel.parent_path != parent_path;
                || existing_channel.parent_path != parent_path
                || existing_channel.channel_order != channel_proto.channel_order;

            existing_channel.visibility = channel_proto.visibility();
            existing_channel.name = channel_proto.name.into();
            existing_channel.parent_path = parent_path;
            existing_channel.channel_order = channel_proto.channel_order;
        } else {
            self.channels_by_id.insert(
                ChannelId(channel_proto.id),
@@ -74,6 +76,7 @@ impl ChannelPathsInsertGuard<'_> {
                    visibility: channel_proto.visibility(),
                    name: channel_proto.name.into(),
                    parent_path,
                    channel_order: channel_proto.channel_order,
                }),
            );
            self.insert_root(ChannelId(channel_proto.id));
@@ -100,17 +103,18 @@ impl Drop for ChannelPathsInsertGuard<'_> {
fn channel_path_sorting_key(
    id: ChannelId,
    channels_by_id: &BTreeMap<ChannelId, Arc<Channel>>,
) -> impl Iterator<Item = (&str, ChannelId)> {
    let (parent_path, name) = channels_by_id
        .get(&id)
        .map_or((&[] as &[_], None), |channel| {
            (
                channel.parent_path.as_slice(),
                Some((channel.name.as_ref(), channel.id)),
            )
        });
) -> impl Iterator<Item = (i32, ChannelId)> {
    let (parent_path, order_and_id) =
        channels_by_id
            .get(&id)
            .map_or((&[] as &[_], None), |channel| {
                (
                    channel.parent_path.as_slice(),
                    Some((channel.channel_order, channel.id)),
                )
            });
    parent_path
        .iter()
        .filter_map(|id| Some((channels_by_id.get(id)?.name.as_ref(), *id)))
        .chain(name)
        .filter_map(|id| Some((channels_by_id.get(id)?.channel_order, *id)))
        .chain(order_and_id)
}

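The rewritten `channel_path_sorting_key` swaps the name-based sibling key for a numeric one, so siblings sort by `channel_order` at every level of the ancestor path. A self-contained sketch of the resulting comparison (simplified stand-in types, not the collab code itself):

```rust
use std::collections::BTreeMap;

// Each channel's key is the (channel_order, id) pair of every ancestor,
// followed by its own pair. Because a parent's key is a prefix of its
// children's keys, lexicographic sorting yields a depth-first traversal
// ordered by channel_order at each level.
struct Chan {
    id: u64,
    parent_path: Vec<u64>, // ancestor ids from the root
    channel_order: i32,
}

fn sort_key(chan: &Chan, by_id: &BTreeMap<u64, Chan>) -> Vec<(i32, u64)> {
    chan.parent_path
        .iter()
        .filter_map(|id| by_id.get(id).map(|c| (c.channel_order, c.id)))
        .chain(std::iter::once((chan.channel_order, chan.id)))
        .collect()
}

fn sorted_ids(by_id: &BTreeMap<u64, Chan>) -> Vec<u64> {
    let mut chans: Vec<&Chan> = by_id.values().collect();
    chans.sort_by_key(|c| sort_key(c, by_id));
    chans.iter().map(|c| c.id).collect()
}
```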
@@ -21,12 +21,14 @@ fn test_update_channels(cx: &mut App) {
                name: "b".to_string(),
                visibility: proto::ChannelVisibility::Members as i32,
                parent_path: Vec::new(),
                channel_order: 1,
            },
            proto::Channel {
                id: 2,
                name: "a".to_string(),
                visibility: proto::ChannelVisibility::Members as i32,
                parent_path: Vec::new(),
                channel_order: 2,
            },
        ],
        ..Default::default()
@@ -37,8 +39,8 @@ fn test_update_channels(cx: &mut App) {
        &channel_store,
        &[
            //
            (0, "a".to_string()),
            (0, "b".to_string()),
            (0, "a".to_string()),
        ],
        cx,
    );
@@ -52,12 +54,14 @@ fn test_update_channels(cx: &mut App) {
                name: "x".to_string(),
                visibility: proto::ChannelVisibility::Members as i32,
                parent_path: vec![1],
                channel_order: 1,
            },
            proto::Channel {
                id: 4,
                name: "y".to_string(),
                visibility: proto::ChannelVisibility::Members as i32,
                parent_path: vec![2],
                channel_order: 1,
            },
        ],
        ..Default::default()
@@ -67,15 +71,111 @@ fn test_update_channels(cx: &mut App) {
    assert_channels(
        &channel_store,
        &[
            (0, "a".to_string()),
            (1, "y".to_string()),
            (0, "b".to_string()),
            (1, "x".to_string()),
            (0, "a".to_string()),
            (1, "y".to_string()),
        ],
        cx,
    );
}

#[gpui::test]
fn test_update_channels_order_independent(cx: &mut App) {
    /// Based on: https://stackoverflow.com/a/59939809
    fn unique_permutations<T: Clone>(items: Vec<T>) -> Vec<Vec<T>> {
        if items.len() == 1 {
            vec![items]
        } else {
            let mut output: Vec<Vec<T>> = vec![];

            for (ix, first) in items.iter().enumerate() {
                let mut remaining_elements = items.clone();
                remaining_elements.remove(ix);
                for mut permutation in unique_permutations(remaining_elements) {
                    permutation.insert(0, first.clone());
                    output.push(permutation);
                }
            }
            output
        }
    }

    let test_data = vec![
        proto::Channel {
            id: 6,
            name: "β".to_string(),
            visibility: proto::ChannelVisibility::Members as i32,
            parent_path: vec![1, 3],
            channel_order: 1,
        },
        proto::Channel {
            id: 5,
            name: "α".to_string(),
            visibility: proto::ChannelVisibility::Members as i32,
            parent_path: vec![1],
            channel_order: 2,
        },
        proto::Channel {
            id: 3,
            name: "x".to_string(),
            visibility: proto::ChannelVisibility::Members as i32,
            parent_path: vec![1],
            channel_order: 1,
        },
        proto::Channel {
            id: 4,
            name: "y".to_string(),
            visibility: proto::ChannelVisibility::Members as i32,
            parent_path: vec![2],
            channel_order: 1,
        },
        proto::Channel {
            id: 1,
            name: "b".to_string(),
            visibility: proto::ChannelVisibility::Members as i32,
            parent_path: Vec::new(),
            channel_order: 1,
        },
        proto::Channel {
            id: 2,
            name: "a".to_string(),
            visibility: proto::ChannelVisibility::Members as i32,
            parent_path: Vec::new(),
            channel_order: 2,
        },
    ];

    let channel_store = init_test(cx);
    let permutations = unique_permutations(test_data);

    for test_instance in permutations {
        channel_store.update(cx, |channel_store, _| channel_store.reset());

        update_channels(
            &channel_store,
            proto::UpdateChannels {
                channels: test_instance,
                ..Default::default()
            },
            cx,
        );

        assert_channels(
            &channel_store,
            &[
                (0, "b".to_string()),
                (1, "x".to_string()),
                (2, "β".to_string()),
                (1, "α".to_string()),
                (0, "a".to_string()),
                (1, "y".to_string()),
            ],
            cx,
        );
    }
}

#[gpui::test]
fn test_dangling_channel_paths(cx: &mut App) {
    let channel_store = init_test(cx);
@@ -89,18 +189,21 @@ fn test_dangling_channel_paths(cx: &mut App) {
                name: "a".to_string(),
                visibility: proto::ChannelVisibility::Members as i32,
                parent_path: vec![],
                channel_order: 1,
            },
            proto::Channel {
                id: 1,
                name: "b".to_string(),
                visibility: proto::ChannelVisibility::Members as i32,
                parent_path: vec![0],
                channel_order: 1,
            },
            proto::Channel {
                id: 2,
                name: "c".to_string(),
                visibility: proto::ChannelVisibility::Members as i32,
                parent_path: vec![0, 1],
                channel_order: 1,
            },
        ],
        ..Default::default()
@@ -147,6 +250,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
            name: "the-channel".to_string(),
            visibility: proto::ChannelVisibility::Members as i32,
            parent_path: vec![],
            channel_order: 1,
        }],
        ..Default::default()
    });

@@ -57,7 +57,7 @@ We run two instances of collab:

Both of these run on the Kubernetes cluster hosted in Digital Ocean.

Deployment is triggered by pushing to the `collab-staging` (or `collab-production`) tag in Github. The best way to do this is:
Deployment is triggered by pushing to the `collab-staging` (or `collab-production`) tag in GitHub. The best way to do this is:

- `./script/deploy-collab staging`
- `./script/deploy-collab production`

@@ -266,11 +266,14 @@ CREATE TABLE "channels" (
    "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "visibility" VARCHAR NOT NULL,
    "parent_path" TEXT NOT NULL,
    "requires_zed_cla" BOOLEAN NOT NULL DEFAULT FALSE
    "requires_zed_cla" BOOLEAN NOT NULL DEFAULT FALSE,
    "channel_order" INTEGER NOT NULL DEFAULT 1
);

CREATE INDEX "index_channels_on_parent_path" ON "channels" ("parent_path");

CREATE INDEX "index_channels_on_parent_path_and_order" ON "channels" ("parent_path", "channel_order");

CREATE TABLE IF NOT EXISTS "channel_chat_participants" (
    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
    "user_id" INTEGER NOT NULL REFERENCES users (id),

@@ -0,0 +1,16 @@
-- Add channel_order column to channels table with default value
ALTER TABLE channels ADD COLUMN channel_order INTEGER NOT NULL DEFAULT 1;

-- Update channel_order for existing channels using ROW_NUMBER for deterministic ordering
UPDATE channels
SET channel_order = (
    SELECT ROW_NUMBER() OVER (
        PARTITION BY parent_path
        ORDER BY name, id
    )
    FROM channels c2
    WHERE c2.id = channels.id
);

-- Create index for efficient ordering queries
CREATE INDEX "index_channels_on_parent_path_and_order" ON "channels" ("parent_path", "channel_order");
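In plain terms, the backfill above gives every pre-existing channel a 1-based position among its siblings, ordered by the old alphabetical (name, then id) rule, so existing trees keep their visible order after the migration. A sketch of the same assignment in Rust (illustrative only; the SQL above is what actually runs):

```rust
use std::collections::HashMap;

// Mirror of ROW_NUMBER() OVER (PARTITION BY parent_path ORDER BY name, id):
// rows are (parent_path, name, id, channel_order) tuples.
fn backfill_orders(rows: &mut [(String, String, i64, i32)]) {
    rows.sort_by(|a, b| (&a.0, &a.1, a.2).cmp(&(&b.0, &b.1, b.2)));
    let mut next: HashMap<String, i32> = HashMap::new();
    for row in rows.iter_mut() {
        let n = next.entry(row.0.clone()).or_insert(0);
        *n += 1; // 1-based within each parent_path partition
        row.3 = *n;
    }
}
```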
@@ -582,6 +582,7 @@ pub struct Channel {
    pub visibility: ChannelVisibility,
    /// parent_path is the channel ids from the root to this one (not including this one)
    pub parent_path: Vec<ChannelId>,
    pub channel_order: i32,
}

impl Channel {
@@ -591,6 +592,7 @@ impl Channel {
            visibility: value.visibility,
            name: value.clone().name,
            parent_path: value.ancestors().collect(),
            channel_order: value.channel_order,
        }
    }

@@ -600,8 +602,13 @@ impl Channel {
            name: self.name.clone(),
            visibility: self.visibility.into(),
            parent_path: self.parent_path.iter().map(|c| c.to_proto()).collect(),
            channel_order: self.channel_order,
        }
    }

    pub fn root_id(&self) -> ChannelId {
        self.parent_path.first().copied().unwrap_or(self.id)
    }
}

#[derive(Debug, PartialEq, Eq, Hash)]

@@ -4,7 +4,7 @@ use rpc::{
    ErrorCode, ErrorCodeExt,
    proto::{ChannelBufferVersion, VectorClockEntry, channel_member::Kind},
};
use sea_orm::{DbBackend, TryGetableMany};
use sea_orm::{ActiveValue, DbBackend, TryGetableMany};

impl Database {
    #[cfg(test)]
@@ -59,16 +59,32 @@ impl Database {
                parent = Some(parent_channel);
            }

            let parent_path = parent
                .as_ref()
                .map_or(String::new(), |parent| parent.path());

            // Find the maximum channel_order among siblings to set the new channel at the end
            let max_order = if parent_path.is_empty() {
                0
            } else {
                max_order(&parent_path, &tx).await?
            };

            log::info!(
                "Creating channel '{}' with parent_path='{}', max_order={}, new_order={}",
                name,
                parent_path,
                max_order,
                max_order + 1
            );

            let channel = channel::ActiveModel {
                id: ActiveValue::NotSet,
                name: ActiveValue::Set(name.to_string()),
                visibility: ActiveValue::Set(ChannelVisibility::Members),
                parent_path: ActiveValue::Set(
                    parent
                        .as_ref()
                        .map_or(String::new(), |parent| parent.path()),
                ),
                parent_path: ActiveValue::Set(parent_path),
                requires_zed_cla: ActiveValue::NotSet,
                channel_order: ActiveValue::Set(max_order + 1),
            }
            .insert(&*tx)
            .await?;
@@ -531,11 +547,7 @@ impl Database {
            .get_channel_descendants_excluding_self(channels.iter(), tx)
            .await?;

        for channel in channels {
            if let Err(ix) = descendants.binary_search_by_key(&channel.path(), |c| c.path()) {
                descendants.insert(ix, channel);
            }
        }
        descendants.extend(channels);

        let roles_by_channel_id = channel_memberships
            .iter()
@@ -952,11 +964,14 @@ impl Database {
            }

            let root_id = channel.root_id();
            let new_parent_path = new_parent.path();
            let old_path = format!("{}{}/", channel.parent_path, channel.id);
            let new_path = format!("{}{}/", new_parent.path(), channel.id);
            let new_path = format!("{}{}/", &new_parent_path, channel.id);
            let new_order = max_order(&new_parent_path, &tx).await? + 1;

            let mut model = channel.into_active_model();
            model.parent_path = ActiveValue::Set(new_parent.path());
            model.channel_order = ActiveValue::Set(new_order);
            let channel = model.update(&*tx).await?;

            let descendent_ids =
@@ -986,6 +1001,137 @@ impl Database {
        })
        .await
    }

    pub async fn reorder_channel(
        &self,
        channel_id: ChannelId,
        direction: proto::reorder_channel::Direction,
        user_id: UserId,
    ) -> Result<Vec<Channel>> {
        self.transaction(|tx| async move {
            let mut channel = self.get_channel_internal(channel_id, &tx).await?;

            if channel.is_root() {
                log::info!("Skipping reorder of root channel {}", channel.id,);
                return Ok(vec![]);
            }

            log::info!(
                "Reordering channel {} (parent_path: '{}', order: {})",
                channel.id,
                channel.parent_path,
                channel.channel_order
            );

            // Check if user is admin of the channel
            self.check_user_is_channel_admin(&channel, user_id, &tx)
                .await?;

            // Find the sibling channel to swap with
            let sibling_channel = match direction {
                proto::reorder_channel::Direction::Up => {
                    log::info!(
                        "Looking for sibling with parent_path='{}' and order < {}",
                        channel.parent_path,
                        channel.channel_order
                    );
                    // Find channel with highest order less than current
                    channel::Entity::find()
                        .filter(
                            channel::Column::ParentPath
                                .eq(&channel.parent_path)
                                .and(channel::Column::ChannelOrder.lt(channel.channel_order)),
                        )
                        .order_by_desc(channel::Column::ChannelOrder)
                        .one(&*tx)
                        .await?
                }
                proto::reorder_channel::Direction::Down => {
                    log::info!(
                        "Looking for sibling with parent_path='{}' and order > {}",
                        channel.parent_path,
                        channel.channel_order
                    );
                    // Find channel with lowest order greater than current
                    channel::Entity::find()
                        .filter(
                            channel::Column::ParentPath
                                .eq(&channel.parent_path)
                                .and(channel::Column::ChannelOrder.gt(channel.channel_order)),
                        )
                        .order_by_asc(channel::Column::ChannelOrder)
                        .one(&*tx)
                        .await?
                }
            };

            let mut sibling_channel = match sibling_channel {
                Some(sibling) => {
                    log::info!(
                        "Found sibling {} (parent_path: '{}', order: {})",
                        sibling.id,
                        sibling.parent_path,
                        sibling.channel_order
                    );
                    sibling
                }
                None => {
                    log::warn!("No sibling found to swap with");
                    // No sibling to swap with
                    return Ok(vec![]);
                }
            };

            let current_order = channel.channel_order;
            let sibling_order = sibling_channel.channel_order;

            channel::ActiveModel {
                id: ActiveValue::Unchanged(sibling_channel.id),
                channel_order: ActiveValue::Set(current_order),
                ..Default::default()
            }
            .update(&*tx)
            .await?;
            sibling_channel.channel_order = current_order;

            channel::ActiveModel {
                id: ActiveValue::Unchanged(channel.id),
                channel_order: ActiveValue::Set(sibling_order),
                ..Default::default()
            }
            .update(&*tx)
            .await?;
            channel.channel_order = sibling_order;

            log::info!(
                "Reorder complete. Swapped channels {} and {}",
                channel.id,
                sibling_channel.id
            );

            let swapped_channels = vec![
                Channel::from_model(channel),
                Channel::from_model(sibling_channel),
            ];

            Ok(swapped_channels)
        })
        .await
    }
}

async fn max_order(parent_path: &str, tx: &TransactionHandle) -> Result<i32> {
    let max_order = channel::Entity::find()
        .filter(channel::Column::ParentPath.eq(parent_path))
        .select_only()
        .column_as(channel::Column::ChannelOrder.max(), "max_order")
        .into_tuple::<Option<i32>>()
        .one(&**tx)
        .await?
        .flatten()
        .unwrap_or(0);

    Ok(max_order)
}

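Taken together, `reorder_channel` and `max_order` maintain a simple invariant: orders are only ever swapped between two adjacent siblings, and new or moved channels append at max + 1, so a reorder costs exactly two row updates and never disturbs orders elsewhere in the tree. A pure-data sketch of the swap (simplified types, no database; not the collab code itself):

```rust
enum Direction { Up, Down }

// Swap channel_order with the nearest sibling in the given direction.
// Returns the ids whose rows changed; empty when already at the edge,
// matching the `None => return Ok(vec![])` arm above.
fn reorder(siblings: &mut [(u64, i32)], id: u64, dir: Direction) -> Vec<u64> {
    let me = match siblings.iter().position(|s| s.0 == id) {
        Some(ix) => ix,
        None => return vec![],
    };
    let my_order = siblings[me].1;
    let neighbor = match dir {
        // Highest order strictly below mine...
        Direction::Up => siblings
            .iter()
            .enumerate()
            .filter(|(_, s)| s.1 < my_order)
            .max_by_key(|&(_, s)| s.1)
            .map(|(ix, _)| ix),
        // ...or lowest order strictly above mine.
        Direction::Down => siblings
            .iter()
            .enumerate()
            .filter(|(_, s)| s.1 > my_order)
            .min_by_key(|&(_, s)| s.1)
            .map(|(ix, _)| ix),
    };
    match neighbor {
        Some(n) => {
            let sibling_order = siblings[n].1;
            siblings[n].1 = my_order;
            siblings[me].1 = sibling_order;
            vec![siblings[me].0, siblings[n].0]
        }
        None => vec![],
    }
}
```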
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
|
||||
@@ -66,6 +66,87 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
/// Delete all channel chat participants from previous servers
|
||||
pub async fn delete_stale_channel_chat_participants(
|
||||
&self,
|
||||
environment: &str,
|
||||
new_server_id: ServerId,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
let stale_server_epochs = self
|
||||
.stale_server_ids(environment, new_server_id, &tx)
|
||||
.await?;
|
||||
|
||||
channel_chat_participant::Entity::delete_many()
|
||||
.filter(
|
||||
channel_chat_participant::Column::ConnectionServerId
|
||||
.is_in(stale_server_epochs.iter().copied()),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn clear_old_worktree_entries(&self, server_id: ServerId) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
use sea_orm::Statement;
|
||||
use sea_orm::sea_query::{Expr, Query};
|
||||
|
||||
loop {
|
||||
let delete_query = Query::delete()
|
||||
.from_table(worktree_entry::Entity)
|
||||
.and_where(
|
||||
Expr::tuple([
|
||||
Expr::col((worktree_entry::Entity, worktree_entry::Column::ProjectId))
|
||||
.into(),
|
||||
Expr::col((worktree_entry::Entity, worktree_entry::Column::WorktreeId))
|
||||
.into(),
|
||||
Expr::col((worktree_entry::Entity, worktree_entry::Column::Id)).into(),
|
||||
])
|
||||
.in_subquery(
|
||||
Query::select()
|
||||
.columns([
|
||||
(worktree_entry::Entity, worktree_entry::Column::ProjectId),
|
||||
(worktree_entry::Entity, worktree_entry::Column::WorktreeId),
|
||||
(worktree_entry::Entity, worktree_entry::Column::Id),
|
||||
])
|
||||
.from(worktree_entry::Entity)
|
||||
.inner_join(
|
||||
project::Entity,
|
||||
Expr::col((project::Entity, project::Column::Id)).equals((
|
||||
worktree_entry::Entity,
|
||||
worktree_entry::Column::ProjectId,
|
||||
)),
|
||||
)
|
||||
.and_where(project::Column::HostConnectionServerId.ne(server_id))
|
||||
.limit(10000)
|
||||
.to_owned(),
|
||||
),
|
||||
)
|
||||
.to_owned();
|
||||
|
||||
let statement = Statement::from_sql_and_values(
|
||||
tx.get_database_backend(),
|
||||
delete_query
|
||||
.to_string(sea_orm::sea_query::PostgresQueryBuilder)
|
||||
.as_str(),
|
||||
vec![],
|
||||
);
|
||||
|
||||
let result = tx.execute(statement).await?;
|
||||
if result.rows_affected() == 0 {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
/// Deletes any stale servers in the environment that don't match the `new_server_id`.
|
||||
pub async fn delete_stale_servers(
|
||||
&self,
|
||||
@@ -86,7 +167,7 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
async fn stale_server_ids(
|
||||
pub async fn stale_server_ids(
|
||||
&self,
|
||||
environment: &str,
|
||||
new_server_id: ServerId,
|
||||
|
||||
@@ -10,6 +10,9 @@ pub struct Model {
|
||||
pub visibility: ChannelVisibility,
|
||||
pub parent_path: String,
|
||||
pub requires_zed_cla: bool,
|
||||
/// The order of this channel relative to its siblings within the same parent.
|
||||
/// Lower values appear first. Channels are sorted by parent_path first, then by channel_order.
|
||||
pub channel_order: i32,
|
||||
}
|
||||
|
||||
impl Model {
|
||||
|
||||
@@ -172,16 +172,40 @@ impl Drop for TestDb {
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn assert_channel_tree_matches(actual: Vec<Channel>, expected: Vec<Channel>) {
|
||||
let expected_channels = expected.into_iter().collect::<HashSet<_>>();
|
||||
let actual_channels = actual.into_iter().collect::<HashSet<_>>();
|
||||
pretty_assertions::assert_eq!(expected_channels, actual_channels);
|
||||
}
|
||||
|
||||
fn channel_tree(channels: &[(ChannelId, &[ChannelId], &'static str)]) -> Vec<Channel> {
|
||||
channels
|
||||
.iter()
|
||||
.map(|(id, parent_path, name)| Channel {
|
||||
use std::collections::HashMap;
|
||||
|
||||
let mut result = Vec::new();
|
||||
let mut order_by_parent: HashMap<Vec<ChannelId>, i32> = HashMap::new();
|
||||
|
||||
for (id, parent_path, name) in channels {
|
||||
let parent_key = parent_path.to_vec();
|
||||
let order = if parent_key.is_empty() {
|
||||
1
|
||||
} else {
|
||||
*order_by_parent
|
||||
.entry(parent_key.clone())
|
||||
.and_modify(|e| *e += 1)
|
||||
.or_insert(1)
|
||||
};
|
||||
|
||||
result.push(Channel {
|
||||
id: *id,
|
||||
name: name.to_string(),
|
||||
visibility: ChannelVisibility::Members,
|
||||
parent_path: parent_path.to_vec(),
|
||||
})
|
||||
.collect()
|
||||
parent_path: parent_key,
|
||||
channel_order: order,
|
||||
});
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
static GITHUB_USER_ID: AtomicI32 = AtomicI32::new(5);
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
use crate::{
|
||||
db::{
|
||||
Channel, ChannelId, ChannelRole, Database, NewUserParams, RoomId, UserId,
|
||||
tests::{channel_tree, new_test_connection, new_test_user},
|
||||
tests::{assert_channel_tree_matches, channel_tree, new_test_connection, new_test_user},
|
||||
},
|
||||
test_both_dbs,
|
||||
};
|
||||
use rpc::{
|
||||
ConnectionId,
|
||||
proto::{self},
|
||||
proto::{self, reorder_channel},
|
||||
};
|
||||
use std::sync::Arc;
|
||||
use std::{collections::HashSet, sync::Arc};
|
||||
|
||||
test_both_dbs!(test_channels, test_channels_postgres, test_channels_sqlite);
|
||||
|
||||
@@ -59,28 +59,28 @@ async fn test_channels(db: &Arc<Database>) {
|
||||
.unwrap();
|
||||
|
||||
let result = db.get_channels_for_user(a_id).await.unwrap();
|
||||
assert_eq!(
|
||||
assert_channel_tree_matches(
|
||||
result.channels,
|
||||
channel_tree(&[
|
||||
(zed_id, &[], "zed"),
|
||||
(crdb_id, &[zed_id], "crdb"),
|
||||
(livestreaming_id, &[zed_id], "livestreaming",),
|
||||
(livestreaming_id, &[zed_id], "livestreaming"),
|
||||
(replace_id, &[zed_id], "replace"),
|
||||
(rust_id, &[], "rust"),
|
||||
(cargo_id, &[rust_id], "cargo"),
|
||||
(cargo_ra_id, &[rust_id, cargo_id], "cargo-ra",)
|
||||
],)
|
||||
(cargo_ra_id, &[rust_id, cargo_id], "cargo-ra"),
|
||||
]),
|
||||
);
|
||||
|
||||
let result = db.get_channels_for_user(b_id).await.unwrap();
|
||||
assert_eq!(
|
||||
assert_channel_tree_matches(
|
||||
result.channels,
|
||||
channel_tree(&[
|
||||
(zed_id, &[], "zed"),
|
||||
(crdb_id, &[zed_id], "crdb"),
|
||||
(livestreaming_id, &[zed_id], "livestreaming",),
|
||||
(replace_id, &[zed_id], "replace")
|
||||
],)
|
||||
(livestreaming_id, &[zed_id], "livestreaming"),
|
||||
(replace_id, &[zed_id], "replace"),
|
||||
]),
|
||||
);
|
||||
|
||||
// Update member permissions
|
||||
@@ -94,14 +94,14 @@ async fn test_channels(db: &Arc<Database>) {
|
||||
assert!(set_channel_admin.is_ok());
|
||||
|
||||
let result = db.get_channels_for_user(b_id).await.unwrap();
|
||||
assert_eq!(
|
||||
assert_channel_tree_matches(
|
||||
result.channels,
|
||||
channel_tree(&[
|
||||
(zed_id, &[], "zed"),
|
||||
(crdb_id, &[zed_id], "crdb"),
|
||||
(livestreaming_id, &[zed_id], "livestreaming",),
|
||||
(replace_id, &[zed_id], "replace")
|
||||
],)
|
||||
(livestreaming_id, &[zed_id], "livestreaming"),
|
||||
(replace_id, &[zed_id], "replace"),
|
||||
]),
|
||||
);
|
||||
|
||||
// Remove a single channel
|
||||
@@ -313,8 +313,8 @@ async fn test_channel_renames(db: &Arc<Database>) {
|
||||
|
||||
test_both_dbs!(
|
||||
test_db_channel_moving,
|
||||
test_channels_moving_postgres,
|
||||
test_channels_moving_sqlite
|
||||
test_db_channel_moving_postgres,
|
||||
test_db_channel_moving_sqlite
|
||||
);
|
||||
|
||||
async fn test_db_channel_moving(db: &Arc<Database>) {
|
||||
@@ -343,16 +343,14 @@ async fn test_db_channel_moving(db: &Arc<Database>) {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let livestreaming_dag_id = db
|
||||
.create_sub_channel("livestreaming_dag", livestreaming_id, a_id)
|
||||
let livestreaming_sub_id = db
|
||||
.create_sub_channel("livestreaming_sub", livestreaming_id, a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// ========================================================================
|
||||
// sanity check
|
||||
// Initial DAG:
|
||||
// /- gpui2
|
||||
// zed -- crdb - livestreaming - livestreaming_dag
|
||||
// zed -- crdb - livestreaming - livestreaming_sub
|
||||
let result = db.get_channels_for_user(a_id).await.unwrap();
|
||||
assert_channel_tree(
|
||||
result.channels,
|
||||
@@ -360,10 +358,242 @@ async fn test_db_channel_moving(db: &Arc<Database>) {
|
||||
(zed_id, &[]),
|
||||
(crdb_id, &[zed_id]),
|
||||
(livestreaming_id, &[zed_id, crdb_id]),
|
||||
(livestreaming_dag_id, &[zed_id, crdb_id, livestreaming_id]),
|
||||
(livestreaming_sub_id, &[zed_id, crdb_id, livestreaming_id]),
|
||||
(gpui2_id, &[zed_id]),
|
||||
],
|
||||
);
|
||||
|
||||
// Check that we can do a simple leaf -> leaf move
|
||||
db.move_channel(livestreaming_sub_id, crdb_id, a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// /- gpui2
|
||||
// zed -- crdb -- livestreaming
|
||||
// \- livestreaming_sub
|
||||
let result = db.get_channels_for_user(a_id).await.unwrap();
|
||||
assert_channel_tree(
|
||||
result.channels,
|
||||
&[
|
||||
(zed_id, &[]),
|
||||
(crdb_id, &[zed_id]),
|
||||
(livestreaming_id, &[zed_id, crdb_id]),
|
||||
(livestreaming_sub_id, &[zed_id, crdb_id]),
|
||||
(gpui2_id, &[zed_id]),
|
||||
],
|
    );

    // Check that we can move a whole subtree at once
    db.move_channel(crdb_id, gpui2_id, a_id).await.unwrap();

    // zed -- gpui2 -- crdb -- livestreaming
    //                     \- livestreaming_sub
    let result = db.get_channels_for_user(a_id).await.unwrap();
    assert_channel_tree(
        result.channels,
        &[
            (zed_id, &[]),
            (gpui2_id, &[zed_id]),
            (crdb_id, &[zed_id, gpui2_id]),
            (livestreaming_id, &[zed_id, gpui2_id, crdb_id]),
            (livestreaming_sub_id, &[zed_id, gpui2_id, crdb_id]),
        ],
    );
}

test_both_dbs!(
    test_channel_reordering,
    test_channel_reordering_postgres,
    test_channel_reordering_sqlite
);

async fn test_channel_reordering(db: &Arc<Database>) {
    let admin_id = db
        .create_user(
            "admin@example.com",
            None,
            false,
            NewUserParams {
                github_login: "admin".into(),
                github_user_id: 1,
            },
        )
        .await
        .unwrap()
        .user_id;

    let user_id = db
        .create_user(
            "user@example.com",
            None,
            false,
            NewUserParams {
                github_login: "user".into(),
                github_user_id: 2,
            },
        )
        .await
        .unwrap()
        .user_id;

    // Create a root channel with some sub-channels
    let root_id = db.create_root_channel("root", admin_id).await.unwrap();

    // Invite user to root channel so they can see the sub-channels
    db.invite_channel_member(root_id, user_id, admin_id, ChannelRole::Member)
        .await
        .unwrap();
    db.respond_to_channel_invite(root_id, user_id, true)
        .await
        .unwrap();

    let alpha_id = db
        .create_sub_channel("alpha", root_id, admin_id)
        .await
        .unwrap();
    let beta_id = db
        .create_sub_channel("beta", root_id, admin_id)
        .await
        .unwrap();
    let gamma_id = db
        .create_sub_channel("gamma", root_id, admin_id)
        .await
        .unwrap();

    // Initial order should be: root, alpha (order=1), beta (order=2), gamma (order=3)
    let result = db.get_channels_for_user(admin_id).await.unwrap();
    assert_channel_tree_order(
        result.channels,
        &[
            (root_id, &[], 1),
            (alpha_id, &[root_id], 1),
            (beta_id, &[root_id], 2),
            (gamma_id, &[root_id], 3),
        ],
    );

    // Test moving beta up (should swap with alpha)
    let updated_channels = db
        .reorder_channel(beta_id, reorder_channel::Direction::Up, admin_id)
        .await
        .unwrap();

    // Verify that beta and alpha were returned as updated
    assert_eq!(updated_channels.len(), 2);
    let updated_ids: std::collections::HashSet<_> = updated_channels.iter().map(|c| c.id).collect();
    assert!(updated_ids.contains(&alpha_id));
    assert!(updated_ids.contains(&beta_id));

    // Now order should be: root, beta (order=1), alpha (order=2), gamma (order=3)
    let result = db.get_channels_for_user(admin_id).await.unwrap();
    assert_channel_tree_order(
        result.channels,
        &[
            (root_id, &[], 1),
            (beta_id, &[root_id], 1),
            (alpha_id, &[root_id], 2),
            (gamma_id, &[root_id], 3),
        ],
    );

    // Test moving gamma down (should be no-op since it's already last)
    let updated_channels = db
        .reorder_channel(gamma_id, reorder_channel::Direction::Down, admin_id)
        .await
        .unwrap();

    // Should return nothing
    assert_eq!(updated_channels.len(), 0);

    // Test moving alpha down (should swap with gamma)
    let updated_channels = db
        .reorder_channel(alpha_id, reorder_channel::Direction::Down, admin_id)
        .await
        .unwrap();

    // Verify that alpha and gamma were returned as updated
    assert_eq!(updated_channels.len(), 2);
    let updated_ids: std::collections::HashSet<_> = updated_channels.iter().map(|c| c.id).collect();
    assert!(updated_ids.contains(&alpha_id));
    assert!(updated_ids.contains(&gamma_id));

    // Now order should be: root, beta (order=1), gamma (order=2), alpha (order=3)
    let result = db.get_channels_for_user(admin_id).await.unwrap();
    assert_channel_tree_order(
        result.channels,
        &[
            (root_id, &[], 1),
            (beta_id, &[root_id], 1),
            (gamma_id, &[root_id], 2),
            (alpha_id, &[root_id], 3),
        ],
    );

    // Test that non-admin cannot reorder
    let reorder_result = db
        .reorder_channel(beta_id, reorder_channel::Direction::Up, user_id)
        .await;
    assert!(reorder_result.is_err());

    // Test moving beta up (should be no-op since it's already first)
    let updated_channels = db
        .reorder_channel(beta_id, reorder_channel::Direction::Up, admin_id)
        .await
        .unwrap();

    // Should return nothing
    assert_eq!(updated_channels.len(), 0);

    // Adding a channel to an existing ordering should add it to the end
    let delta_id = db
        .create_sub_channel("delta", root_id, admin_id)
        .await
        .unwrap();

    let result = db.get_channels_for_user(admin_id).await.unwrap();
    assert_channel_tree_order(
        result.channels,
        &[
            (root_id, &[], 1),
            (beta_id, &[root_id], 1),
            (gamma_id, &[root_id], 2),
            (alpha_id, &[root_id], 3),
            (delta_id, &[root_id], 4),
        ],
    );

    // And moving a channel into an existing ordering should add it to the end
    let eta_id = db
        .create_sub_channel("eta", delta_id, admin_id)
        .await
        .unwrap();

    let result = db.get_channels_for_user(admin_id).await.unwrap();
    assert_channel_tree_order(
        result.channels,
        &[
            (root_id, &[], 1),
            (beta_id, &[root_id], 1),
            (gamma_id, &[root_id], 2),
            (alpha_id, &[root_id], 3),
            (delta_id, &[root_id], 4),
            (eta_id, &[root_id, delta_id], 1),
        ],
    );

    db.move_channel(eta_id, root_id, admin_id).await.unwrap();
    let result = db.get_channels_for_user(admin_id).await.unwrap();
    assert_channel_tree_order(
        result.channels,
        &[
            (root_id, &[], 1),
            (beta_id, &[root_id], 1),
            (gamma_id, &[root_id], 2),
            (alpha_id, &[root_id], 3),
            (delta_id, &[root_id], 4),
            (eta_id, &[root_id], 5),
        ],
    );
}
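The test above pins down the semantics of reorder_channel: siblings carry a 1-based channel_order, a move swaps orders with the adjacent sibling, and a move past either end is a no-op that updates nothing. A minimal in-memory sketch of that swap logic, with illustrative names rather than Zed's actual database code (the real implementation runs against Postgres/SQLite inside a transaction):

#[derive(Clone, Copy)]
enum Direction {
    Up,
    Down,
}

// Swap `channel_order` with the neighboring sibling. Returns the ids whose
// order changed; empty when the channel is already first/last, matching the
// `updated_channels.len() == 0` assertions above.
fn reorder(siblings: &mut [(u64, i32)], id: u64, direction: Direction) -> Vec<u64> {
    siblings.sort_by_key(|&(_, order)| order);
    let Some(ix) = siblings.iter().position(|&(sibling_id, _)| sibling_id == id) else {
        return Vec::new();
    };
    let neighbor_ix = match direction {
        Direction::Up if ix > 0 => ix - 1,
        Direction::Down if ix + 1 < siblings.len() => ix + 1,
        _ => return Vec::new(), // no-op at the edges
    };
    let tmp = siblings[ix].1;
    siblings[ix].1 = siblings[neighbor_ix].1;
    siblings[neighbor_ix].1 = tmp;
    vec![siblings[ix].0, siblings[neighbor_ix].0]
}

With alpha=1, beta=2, gamma=3, moving beta up returns both swapped ids, which is why the test asserts updated_channels.len() == 2.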
test_both_dbs!(

@@ -422,6 +652,20 @@ async fn test_db_channel_moving_bugs(db: &Arc<Database>) {
            (livestreaming_id, &[zed_id, projects_id]),
        ],
    );

+    // Can't un-root a root channel
+    db.move_channel(zed_id, livestreaming_id, user_id)
+        .await
+        .unwrap_err();
+    let result = db.get_channels_for_user(user_id).await.unwrap();
+    assert_channel_tree(
+        result.channels,
+        &[
+            (zed_id, &[]),
+            (projects_id, &[zed_id]),
+            (livestreaming_id, &[zed_id, projects_id]),
+        ],
+    );
 }

test_both_dbs!(
@@ -745,10 +989,29 @@ fn assert_channel_tree(actual: Vec<Channel>, expected: &[(ChannelId, &[ChannelId
    let actual = actual
        .iter()
        .map(|channel| (channel.id, channel.parent_path.as_slice()))
-        .collect::<Vec<_>>();
-    pretty_assertions::assert_eq!(
-        actual,
-        expected.to_vec(),
-        "wrong channel ids and parent paths"
-    );
+        .collect::<HashSet<_>>();
+    let expected = expected
+        .iter()
+        .map(|(id, parents)| (*id, *parents))
+        .collect::<HashSet<_>>();
+    pretty_assertions::assert_eq!(actual, expected, "wrong channel ids and parent paths");
 }

+#[track_caller]
+fn assert_channel_tree_order(actual: Vec<Channel>, expected: &[(ChannelId, &[ChannelId], i32)]) {
+    let actual = actual
+        .iter()
+        .map(|channel| {
+            (
+                channel.id,
+                channel.parent_path.as_slice(),
+                channel.channel_order,
+            )
+        })
+        .collect::<HashSet<_>>();
+    let expected = expected
+        .iter()
+        .map(|(id, parents, order)| (*id, *parents, *order))
+        .collect::<HashSet<_>>();
+    pretty_assertions::assert_eq!(actual, expected, "wrong channel ids and parent paths");
+}
@@ -384,6 +384,7 @@ impl Server {
            .add_request_handler(get_notifications)
            .add_request_handler(mark_notification_as_read)
            .add_request_handler(move_channel)
+            .add_request_handler(reorder_channel)
            .add_request_handler(follow)
            .add_message_handler(unfollow)
            .add_message_handler(update_followers)
@@ -433,6 +434,16 @@ impl Server {
            tracing::info!("waiting for cleanup timeout");
            timeout.await;
            tracing::info!("cleanup timeout expired, retrieving stale rooms");

+            app_state
+                .db
+                .delete_stale_channel_chat_participants(
+                    &app_state.config.zed_environment,
+                    server_id,
+                )
+                .await
+                .trace_err();
+
            if let Some((room_ids, channel_ids)) = app_state
                .db
                .stale_server_resource_ids(&app_state.config.zed_environment, server_id)
@@ -554,6 +565,21 @@ impl Server {
            }
        }

+        app_state
+            .db
+            .delete_stale_channel_chat_participants(
+                &app_state.config.zed_environment,
+                server_id,
+            )
+            .await
+            .trace_err();
+
+        app_state
+            .db
+            .clear_old_worktree_entries(server_id)
+            .await
+            .trace_err();
+
        app_state
            .db
            .delete_stale_servers(&app_state.config.zed_environment, server_id)
@@ -3195,6 +3221,51 @@ async fn move_channel(
    Ok(())
 }

+async fn reorder_channel(
+    request: proto::ReorderChannel,
+    response: Response<proto::ReorderChannel>,
+    session: Session,
+) -> Result<()> {
+    let channel_id = ChannelId::from_proto(request.channel_id);
+    let direction = request.direction();
+
+    let updated_channels = session
+        .db()
+        .await
+        .reorder_channel(channel_id, direction, session.user_id())
+        .await?;
+
+    if let Some(root_id) = updated_channels.first().map(|channel| channel.root_id()) {
+        let connection_pool = session.connection_pool().await;
+        for (connection_id, role) in connection_pool.channel_connection_ids(root_id) {
+            let channels = updated_channels
+                .iter()
+                .filter_map(|channel| {
+                    if role.can_see_channel(channel.visibility) {
+                        Some(channel.to_proto())
+                    } else {
+                        None
+                    }
+                })
+                .collect::<Vec<_>>();
+
+            if channels.is_empty() {
+                continue;
+            }
+
+            let update = proto::UpdateChannels {
+                channels,
+                ..Default::default()
+            };
+
+            session.peer.send(connection_id, update.clone())?;
+        }
+    }
+
+    response.send(Ack {})?;
+    Ok(())
+}
+
 /// Get the list of channel members
 async fn get_channel_members(
    request: proto::GetChannelMembers,
@@ -2624,6 +2624,7 @@ async fn test_git_diff_base_change(
    client_a.fs().set_head_for_repo(
        Path::new("/dir/.git"),
        &[("a.txt".into(), committed_text.clone())],
+        "deadbeef",
    );

    // Create the buffer
@@ -2717,6 +2718,7 @@ async fn test_git_diff_base_change(
    client_a.fs().set_head_for_repo(
        Path::new("/dir/.git"),
        &[("a.txt".into(), new_committed_text.clone())],
+        "deadbeef",
    );

    // Wait for buffer_local_a to receive it
@@ -3006,6 +3008,7 @@ async fn test_git_status_sync(
    client_a.fs().set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("b.txt".into(), "B".into()), ("c.txt".into(), "c".into())],
+        "deadbeef",
    );
    client_a.fs().set_index_for_repo(
        path!("/dir/.git").as_ref(),
@@ -89,6 +89,7 @@ impl CompletionProvider for MessageEditorCompletionProvider {
        _position: language::Anchor,
        text: &str,
        _trigger_in_words: bool,
+        _menu_is_open: bool,
        _cx: &mut Context<Editor>,
    ) -> bool {
        text == "@"
@@ -14,9 +14,9 @@ use fuzzy::{StringMatchCandidate, match_strings};
 use gpui::{
    AnyElement, App, AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, Context, DismissEvent,
    Div, Entity, EventEmitter, FocusHandle, Focusable, FontStyle, InteractiveElement, IntoElement,
-    ListOffset, ListState, MouseDownEvent, ParentElement, Pixels, Point, PromptLevel, Render,
-    SharedString, Styled, Subscription, Task, TextStyle, WeakEntity, Window, actions, anchored,
-    canvas, deferred, div, fill, list, point, prelude::*, px,
+    KeyContext, ListOffset, ListState, MouseDownEvent, ParentElement, Pixels, Point, PromptLevel,
+    Render, SharedString, Styled, Subscription, Task, TextStyle, WeakEntity, Window, actions,
+    anchored, canvas, deferred, div, fill, list, point, prelude::*, px,
 };
 use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrevious};
 use project::{Fs, Project};
@@ -52,6 +52,8 @@ actions!(
        StartMoveChannel,
        MoveSelected,
        InsertSpace,
+        MoveChannelUp,
+        MoveChannelDown,
    ]
 );
@@ -1961,6 +1963,33 @@ impl CollabPanel {
        })
    }

+    fn move_channel_up(&mut self, _: &MoveChannelUp, window: &mut Window, cx: &mut Context<Self>) {
+        if let Some(channel) = self.selected_channel() {
+            self.channel_store.update(cx, |store, cx| {
+                store
+                    .reorder_channel(channel.id, proto::reorder_channel::Direction::Up, cx)
+                    .detach_and_prompt_err("Failed to move channel up", window, cx, |_, _, _| None)
+            });
+        }
+    }
+
+    fn move_channel_down(
+        &mut self,
+        _: &MoveChannelDown,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        if let Some(channel) = self.selected_channel() {
+            self.channel_store.update(cx, |store, cx| {
+                store
+                    .reorder_channel(channel.id, proto::reorder_channel::Direction::Down, cx)
+                    .detach_and_prompt_err("Failed to move channel down", window, cx, |_, _, _| {
+                        None
+                    })
+            });
+        }
+    }
+
    fn open_channel_notes(
        &mut self,
        channel_id: ChannelId,
@@ -1974,7 +2003,7 @@ impl CollabPanel {

    fn show_inline_context_menu(
        &mut self,
-        _: &menu::SecondaryConfirm,
+        _: &Secondary,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
@@ -2003,6 +2032,21 @@ impl CollabPanel {
        }
    }

+    fn dispatch_context(&self, window: &Window, cx: &Context<Self>) -> KeyContext {
+        let mut dispatch_context = KeyContext::new_with_defaults();
+        dispatch_context.add("CollabPanel");
+        dispatch_context.add("menu");
+
+        let identifier = if self.channel_name_editor.focus_handle(cx).is_focused(window) {
+            "editing"
+        } else {
+            "not_editing"
+        };
+
+        dispatch_context.add(identifier);
+        dispatch_context
+    }
+
    fn selected_channel(&self) -> Option<&Arc<Channel>> {
        self.selection
            .and_then(|ix| self.entries.get(ix))
@@ -2965,7 +3009,7 @@ fn render_tree_branch(
 impl Render for CollabPanel {
    fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        v_flex()
-            .key_context("CollabPanel")
+            .key_context(self.dispatch_context(window, cx))
            .on_action(cx.listener(CollabPanel::cancel))
            .on_action(cx.listener(CollabPanel::select_next))
            .on_action(cx.listener(CollabPanel::select_previous))
@@ -2977,6 +3021,8 @@ impl Render for CollabPanel {
            .on_action(cx.listener(CollabPanel::collapse_selected_channel))
            .on_action(cx.listener(CollabPanel::expand_selected_channel))
            .on_action(cx.listener(CollabPanel::start_move_selected_channel))
+            .on_action(cx.listener(CollabPanel::move_channel_up))
+            .on_action(cx.listener(CollabPanel::move_channel_down))
            .track_focus(&self.focus_handle(cx))
            .size_full()
            .child(if self.user_store.read(cx).current_user().is_none() {
@@ -448,7 +448,7 @@ impl PickerDelegate for CommandPaletteDelegate {
    }
 }

-fn humanize_action_name(name: &str) -> String {
+pub fn humanize_action_name(name: &str) -> String {
    let capacity = name.len() + name.chars().filter(|c| c.is_uppercase()).count();
    let mut result = String::with_capacity(capacity);
    for char in name.chars() {
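Making humanize_action_name public matters for the docs preprocessor below, which starts calling command_palette::humanize_action_name to render action names. A rough standalone sketch of the transformation, inferred from the formatting code this same commit removes from the preprocessor (an approximation, not the exact implementation): insert a space before each interior uppercase letter, lowercase it, then collapse "::" into ":".

fn humanize(name: &str) -> String {
    // capacity: one extra byte per uppercase letter that gains a space
    let capacity = name.len() + name.chars().filter(|c| c.is_uppercase()).count();
    let mut result = String::with_capacity(capacity);
    for (i, c) in name.chars().enumerate() {
        if i > 0 && c.is_uppercase() {
            result.push(' ');
            result.extend(c.to_lowercase());
        } else {
            result.push(c);
        }
    }
    result.trim().replace("::", ":")
}

// humanize("editor::MoveToNextWordEnd") == "editor: move to next word end"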
@@ -161,7 +161,7 @@ impl ComponentMetadata {
    }

 /// Implement this trait to define a UI component. This will allow you to
-/// derive `RegisterComponent` on it, in tutn allowing you to preview the
+/// derive `RegisterComponent` on it, in turn allowing you to preview the
 /// contents of the preview fn in `workspace: open component preview`.
 ///
 /// This can be useful for visual debugging and testing, documenting UI
@@ -333,24 +333,6 @@ pub async fn download_adapter_from_github(
    Ok(version_path)
 }

-pub async fn fetch_latest_adapter_version_from_github(
-    github_repo: GithubRepo,
-    delegate: &dyn DapDelegate,
-) -> Result<AdapterVersion> {
-    let release = latest_github_release(
-        &format!("{}/{}", github_repo.repo_owner, github_repo.repo_name),
-        false,
-        false,
-        delegate.http_client(),
-    )
-    .await?;
-
-    Ok(AdapterVersion {
-        tag_name: release.tag_name,
-        url: release.zipball_url,
-    })
-}
-
 #[async_trait(?Send)]
 pub trait DebugAdapter: 'static + Send + Sync {
    fn name(&self) -> DebugAdapterName;
@@ -1,7 +1,8 @@
 use crate::*;
 use anyhow::Context as _;
+use dap::adapters::latest_github_release;
 use dap::{DebugRequest, StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
-use gpui::{AsyncApp, SharedString};
+use gpui::{AppContext, AsyncApp, SharedString};
 use json_dotpath::DotPaths;
 use language::{LanguageName, Toolchain};
 use serde_json::Value;
@@ -21,12 +22,13 @@ pub(crate) struct PythonDebugAdapter {

 impl PythonDebugAdapter {
    const ADAPTER_NAME: &'static str = "Debugpy";
+    const DEBUG_ADAPTER_NAME: DebugAdapterName =
+        DebugAdapterName(SharedString::new_static(Self::ADAPTER_NAME));
    const ADAPTER_PACKAGE_NAME: &'static str = "debugpy";
    const ADAPTER_PATH: &'static str = "src/debugpy/adapter";
    const LANGUAGE_NAME: &'static str = "Python";

    async fn generate_debugpy_arguments(
-        &self,
        host: &Ipv4Addr,
        port: u16,
        user_installed_path: Option<&Path>,
@@ -54,7 +56,7 @@ impl PythonDebugAdapter {
            format!("--port={}", port),
        ])
    } else {
-        let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());
+        let adapter_path = paths::debug_adapters_dir().join(Self::DEBUG_ADAPTER_NAME.as_ref());
        let file_name_prefix = format!("{}_", Self::ADAPTER_NAME);

        let debugpy_dir =
@@ -107,22 +109,21 @@ impl PythonDebugAdapter {
            repo_owner: "microsoft".into(),
        };

-        adapters::fetch_latest_adapter_version_from_github(github_repo, delegate.as_ref()).await
+        fetch_latest_adapter_version_from_github(github_repo, delegate.as_ref()).await
    }

    async fn install_binary(
-        &self,
+        adapter_name: DebugAdapterName,
        version: AdapterVersion,
-        delegate: &Arc<dyn DapDelegate>,
+        delegate: Arc<dyn DapDelegate>,
    ) -> Result<()> {
        let version_path = adapters::download_adapter_from_github(
-            self.name(),
+            adapter_name,
            version,
-            adapters::DownloadedFileType::Zip,
+            adapters::DownloadedFileType::GzipTar,
            delegate.as_ref(),
        )
        .await?;

        // only needed when you install the latest version for the first time
        if let Some(debugpy_dir) =
            util::fs::find_file_name_in_dir(version_path.as_path(), |file_name| {
@@ -171,14 +172,13 @@ impl PythonDebugAdapter {
        let python_command = python_path.context("failed to find binary path for Python")?;
        log::debug!("Using Python executable: {}", python_command);

-        let arguments = self
-            .generate_debugpy_arguments(
-                &host,
-                port,
-                user_installed_path.as_deref(),
-                installed_in_venv,
-            )
-            .await?;
+        let arguments = Self::generate_debugpy_arguments(
+            &host,
+            port,
+            user_installed_path.as_deref(),
+            installed_in_venv,
+        )
+        .await?;

        log::debug!(
            "Starting debugpy adapter with command: {} {}",
@@ -204,7 +204,7 @@ impl PythonDebugAdapter {
 #[async_trait(?Send)]
 impl DebugAdapter for PythonDebugAdapter {
    fn name(&self) -> DebugAdapterName {
-        DebugAdapterName(Self::ADAPTER_NAME.into())
+        Self::DEBUG_ADAPTER_NAME
    }

    fn adapter_language_name(&self) -> Option<LanguageName> {
@@ -635,7 +635,9 @@ impl DebugAdapter for PythonDebugAdapter {
        if self.checked.set(()).is_ok() {
            delegate.output_to_console(format!("Checking latest version of {}...", self.name()));
            if let Some(version) = self.fetch_latest_adapter_version(delegate).await.log_err() {
-                self.install_binary(version, delegate).await?;
+                cx.background_spawn(Self::install_binary(self.name(), version, delegate.clone()))
+                    .await
+                    .context("Failed to install debugpy")?;
            }
        }

@@ -644,6 +646,24 @@ impl DebugAdapter for PythonDebugAdapter {
    }
 }

+async fn fetch_latest_adapter_version_from_github(
+    github_repo: GithubRepo,
+    delegate: &dyn DapDelegate,
+) -> Result<AdapterVersion> {
+    let release = latest_github_release(
+        &format!("{}/{}", github_repo.repo_owner, github_repo.repo_name),
+        false,
+        false,
+        delegate.http_client(),
+    )
+    .await?;
+
+    Ok(AdapterVersion {
+        tag_name: release.tag_name,
+        url: release.tarball_url,
+    })
+}
+
 #[cfg(test)]
 mod tests {
    use super::*;
@@ -651,20 +671,18 @@ mod tests {

    #[gpui::test]
    async fn test_debugpy_install_path_cases() {
-        let adapter = PythonDebugAdapter::default();
        let host = Ipv4Addr::new(127, 0, 0, 1);
        let port = 5678;

        // Case 1: User-defined debugpy path (highest precedence)
        let user_path = PathBuf::from("/custom/path/to/debugpy");
-        let user_args = adapter
-            .generate_debugpy_arguments(&host, port, Some(&user_path), false)
-            .await
-            .unwrap();
+        let user_args =
+            PythonDebugAdapter::generate_debugpy_arguments(&host, port, Some(&user_path), false)
+                .await
+                .unwrap();

        // Case 2: Venv-installed debugpy (uses -m debugpy.adapter)
-        let venv_args = adapter
-            .generate_debugpy_arguments(&host, port, None, true)
+        let venv_args = PythonDebugAdapter::generate_debugpy_arguments(&host, port, None, true)
            .await
            .unwrap();

@@ -679,9 +697,4 @@ mod tests {

        // Note: Case 3 (GitHub-downloaded debugpy) is not tested since this requires mocking the Github API.
    }
-
-    #[test]
-    fn test_adapter_path_constant() {
-        assert_eq!(PythonDebugAdapter::ADAPTER_PATH, "src/debugpy/adapter");
-    }
 }
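One detail worth noting in this refactor: install_binary changes from a &self method to an associated function taking owned arguments, which is what lets the call site hand the future to cx.background_spawn. A future moved onto a background executor generally has to be 'static, and a future produced by a &self method borrows the adapter. A minimal sketch of the constraint, using placeholder types rather than Zed's actual gpui API:

use std::sync::Arc;

struct Delegate;

// Taking owned values (no &self) makes the returned future 'static, so it
// can outlive the caller's stack frame.
async fn install_binary(name: String, delegate: Arc<Delegate>) {
    let _ = (name, delegate); // download and unpack here
}

// Stand-in for an executor's spawn; the `'static` bound is the point.
fn spawn_static<F: std::future::Future<Output = ()> + Send + 'static>(fut: F) {
    drop(fut);
}

fn main() {
    let delegate = Arc::new(Delegate);
    // Compiles because the future owns everything it captures.
    spawn_static(install_binary("debugpy".to_string(), delegate));
}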
@@ -286,6 +286,7 @@ pub(crate) fn new_debugger_pane(
        &new_pane,
        item_id_to_move,
        new_pane.read(cx).active_item_index(),
+        true,
        window,
        cx,
    );
@@ -901,7 +902,6 @@ impl RunningState {
        weak_workspace,
        None,
        weak_project,
-        false,
        window,
        cx,
    )
@@ -1055,15 +1055,7 @@ impl RunningState {
    let terminal = terminal_task.await?;

    let terminal_view = cx.new_window_entity(|window, cx| {
-        TerminalView::new(
-            terminal.clone(),
-            workspace,
-            None,
-            weak_project,
-            false,
-            window,
-            cx,
-        )
+        TerminalView::new(terminal.clone(), workspace, None, weak_project, window, cx)
    })?;

    running.update_in(cx, |running, window, cx| {
@@ -309,6 +309,7 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider {
        _position: language::Anchor,
        _text: &str,
        _trigger_in_words: bool,
+        _menu_is_open: bool,
        _cx: &mut Context<Editor>,
    ) -> bool {
        true
@@ -15,6 +15,9 @@ settings.workspace = true
 regex.workspace = true
 util.workspace = true
 workspace-hack.workspace = true
+zed.workspace = true
+gpui.workspace = true
+command_palette.workspace = true

 [lints]
 workspace = true
@@ -5,6 +5,7 @@ use mdbook::book::{Book, Chapter};
 use mdbook::preprocess::CmdPreprocessor;
 use regex::Regex;
 use settings::KeymapFile;
+use std::collections::HashSet;
 use std::io::{self, Read};
 use std::process;
 use std::sync::LazyLock;
@@ -17,6 +18,8 @@ static KEYMAP_LINUX: LazyLock<KeymapFile> = LazyLock::new(|| {
    load_keymap("keymaps/default-linux.json").expect("Failed to load Linux keymap")
 });

+static ALL_ACTIONS: LazyLock<Vec<ActionDef>> = LazyLock::new(dump_all_gpui_actions);
+
 pub fn make_app() -> Command {
    Command::new("zed-docs-preprocessor")
        .about("Preprocesses Zed Docs content to provide rich action & keybinding support and more")
@@ -29,6 +32,9 @@ pub fn make_app() -> Command {

 fn main() -> Result<()> {
    let matches = make_app().get_matches();
+    // call a zed:: function so everything in `zed` crate is linked and
+    // all actions in the actual app are registered
+    zed::stdout_is_a_pty();

    if let Some(sub_args) = matches.subcommand_matches("supports") {
        handle_supports(sub_args);
@@ -39,6 +45,43 @@ fn main() -> Result<()> {
    Ok(())
 }

+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+enum Error {
+    ActionNotFound { action_name: String },
+    DeprecatedActionUsed { used: String, should_be: String },
+}
+
+impl Error {
+    fn new_for_not_found_action(action_name: String) -> Self {
+        for action in &*ALL_ACTIONS {
+            for alias in action.deprecated_aliases {
+                if alias == &action_name {
+                    return Error::DeprecatedActionUsed {
+                        used: action_name.clone(),
+                        should_be: action.name.to_string(),
+                    };
+                }
+            }
+        }
+        Error::ActionNotFound {
+            action_name: action_name.to_string(),
+        }
+    }
+}
+
+impl std::fmt::Display for Error {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Error::ActionNotFound { action_name } => write!(f, "Action not found: {}", action_name),
+            Error::DeprecatedActionUsed { used, should_be } => write!(
+                f,
+                "Deprecated action used: {} should be {}",
+                used, should_be
+            ),
+        }
+    }
+}
+
 fn handle_preprocessing() -> Result<()> {
    let mut stdin = io::stdin();
    let mut input = String::new();
@@ -46,8 +89,19 @@ fn handle_preprocessing() -> Result<()> {

    let (_ctx, mut book) = CmdPreprocessor::parse_input(input.as_bytes())?;

-    template_keybinding(&mut book);
-    template_action(&mut book);
+    let mut errors = HashSet::<Error>::new();
+
+    template_and_validate_keybindings(&mut book, &mut errors);
+    template_and_validate_actions(&mut book, &mut errors);
+
+    if !errors.is_empty() {
+        const ANSI_RED: &'static str = "\x1b[31m";
+        const ANSI_RESET: &'static str = "\x1b[0m";
+        for error in &errors {
+            eprintln!("{ANSI_RED}ERROR{ANSI_RESET}: {}", error);
+        }
+        return Err(anyhow::anyhow!("Found {} errors in docs", errors.len()));
+    }

    serde_json::to_writer(io::stdout(), &book)?;

@@ -66,13 +120,17 @@ fn handle_supports(sub_args: &ArgMatches) -> ! {
    }
 }

-fn template_keybinding(book: &mut Book) {
+fn template_and_validate_keybindings(book: &mut Book, errors: &mut HashSet<Error>) {
    let regex = Regex::new(r"\{#kb (.*?)\}").unwrap();

    for_each_chapter_mut(book, |chapter| {
        chapter.content = regex
            .replace_all(&chapter.content, |caps: &regex::Captures| {
                let action = caps[1].trim();
+                if find_action_by_name(action).is_none() {
+                    errors.insert(Error::new_for_not_found_action(action.to_string()));
+                    return String::new();
+                }
                let macos_binding = find_binding("macos", action).unwrap_or_default();
                let linux_binding = find_binding("linux", action).unwrap_or_default();

@@ -86,35 +144,30 @@ fn template_keybinding(book: &mut Book) {
    });
 }

-fn template_action(book: &mut Book) {
+fn template_and_validate_actions(book: &mut Book, errors: &mut HashSet<Error>) {
    let regex = Regex::new(r"\{#action (.*?)\}").unwrap();

    for_each_chapter_mut(book, |chapter| {
        chapter.content = regex
            .replace_all(&chapter.content, |caps: &regex::Captures| {
                let name = caps[1].trim();

-                let formatted_name = name
-                    .chars()
-                    .enumerate()
-                    .map(|(i, c)| {
-                        if i > 0 && c.is_uppercase() {
-                            format!(" {}", c.to_lowercase())
-                        } else {
-                            c.to_string()
-                        }
-                    })
-                    .collect::<String>()
-                    .trim()
-                    .to_string()
-                    .replace("::", ":");
-
-                format!("<code class=\"hljs\">{}</code>", formatted_name)
+                let Some(action) = find_action_by_name(name) else {
+                    errors.insert(Error::new_for_not_found_action(name.to_string()));
+                    return String::new();
+                };
+                format!("<code class=\"hljs\">{}</code>", &action.human_name)
            })
            .into_owned()
    });
 }

+fn find_action_by_name(name: &str) -> Option<&ActionDef> {
+    ALL_ACTIONS
+        .binary_search_by(|action| action.name.cmp(name))
+        .ok()
+        .map(|index| &ALL_ACTIONS[index])
+}
+
 fn find_binding(os: &str, action: &str) -> Option<String> {
    let keymap = match os {
        "macos" => &KEYMAP_MACOS,
@@ -180,3 +233,25 @@ where
        func(chapter);
    });
 }

+#[derive(Debug, serde::Serialize)]
+struct ActionDef {
+    name: &'static str,
+    human_name: String,
+    deprecated_aliases: &'static [&'static str],
+}
+
+fn dump_all_gpui_actions() -> Vec<ActionDef> {
+    let mut actions = gpui::generate_list_of_all_registered_actions()
+        .into_iter()
+        .map(|action| ActionDef {
+            name: action.name,
+            human_name: command_palette::humanize_action_name(action.name),
+            deprecated_aliases: action.aliases,
+        })
+        .collect::<Vec<ActionDef>>();
+
+    actions.sort_by_key(|a| a.name);
+
+    return actions;
+}
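find_action_by_name above only works because dump_all_gpui_actions sorts the list by name before it is cached in ALL_ACTIONS: binary_search_by requires a slice ordered by the same key the comparator uses. The same pattern in isolation:

fn main() {
    let mut names = vec!["zed::OpenSettings", "editor::Copy", "editor::Paste"];
    names.sort(); // must match the ordering used by the comparator below
    let found = names
        .binary_search_by(|probe| probe.cmp(&"editor::Paste"))
        .ok()
        .map(|ix| names[ix]);
    assert_eq!(found, Some("editor::Paste"));
}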
@@ -194,6 +194,7 @@ pub enum ContextMenuOrigin {

 pub struct CompletionsMenu {
    pub id: CompletionId,
+    pub source: CompletionsMenuSource,
    sort_completions: bool,
    pub initial_position: Anchor,
    pub initial_query: Option<Arc<String>>,
@@ -208,7 +209,6 @@ pub struct CompletionsMenu {
    scroll_handle: UniformListScrollHandle,
    resolve_completions: bool,
    show_completion_documentation: bool,
-    pub(super) ignore_completion_provider: bool,
    last_rendered_range: Rc<RefCell<Option<Range<usize>>>>,
    markdown_cache: Rc<RefCell<VecDeque<(MarkdownCacheKey, Entity<Markdown>)>>>,
    language_registry: Option<Arc<LanguageRegistry>>,
@@ -227,6 +227,13 @@ enum MarkdownCacheKey {
    },
 }

+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum CompletionsMenuSource {
+    Normal,
+    SnippetChoices,
+    Words,
+}
+
 // TODO: There should really be a wrapper around fuzzy match tasks that does this.
 impl Drop for CompletionsMenu {
    fn drop(&mut self) {
@@ -237,9 +244,9 @@ impl Drop for CompletionsMenu {
 impl CompletionsMenu {
    pub fn new(
        id: CompletionId,
+        source: CompletionsMenuSource,
        sort_completions: bool,
        show_completion_documentation: bool,
-        ignore_completion_provider: bool,
        initial_position: Anchor,
        initial_query: Option<Arc<String>>,
        is_incomplete: bool,
@@ -258,13 +265,13 @@ impl CompletionsMenu {

        let completions_menu = Self {
            id,
+            source,
            sort_completions,
            initial_position,
            initial_query,
            is_incomplete,
            buffer,
            show_completion_documentation,
-            ignore_completion_provider,
            completions: RefCell::new(completions).into(),
            match_candidates,
            entries: Rc::new(RefCell::new(Box::new([]))),
@@ -328,6 +335,7 @@ impl CompletionsMenu {
            .collect();
        Self {
            id,
+            source: CompletionsMenuSource::SnippetChoices,
            sort_completions,
            initial_position: selection.start,
            initial_query: None,
@@ -342,7 +350,6 @@ impl CompletionsMenu {
            scroll_handle: UniformListScrollHandle::new(),
            resolve_completions: false,
            show_completion_documentation: false,
-            ignore_completion_provider: false,
            last_rendered_range: RefCell::new(None).into(),
            markdown_cache: RefCell::new(VecDeque::new()).into(),
            language_registry: None,
@@ -639,6 +639,7 @@ pub struct HighlightedChunk<'a> {
    pub text: &'a str,
    pub style: Option<HighlightStyle>,
    pub is_tab: bool,
+    pub is_inlay: bool,
    pub replacement: Option<ChunkReplacement>,
 }

@@ -652,6 +653,7 @@ impl<'a> HighlightedChunk<'a> {
        let style = self.style;
        let is_tab = self.is_tab;
        let renderer = self.replacement;
+        let is_inlay = self.is_inlay;
        iter::from_fn(move || {
            let mut prefix_len = 0;
            while let Some(&ch) = chars.peek() {
@@ -667,6 +669,7 @@ impl<'a> HighlightedChunk<'a> {
                    text: prefix,
                    style,
                    is_tab,
+                    is_inlay,
                    replacement: renderer.clone(),
                });
            }
@@ -693,6 +696,7 @@ impl<'a> HighlightedChunk<'a> {
                    text: prefix,
                    style: Some(invisible_style),
                    is_tab: false,
+                    is_inlay,
                    replacement: Some(ChunkReplacement::Str(replacement.into())),
                });
            } else {
@@ -716,6 +720,7 @@ impl<'a> HighlightedChunk<'a> {
                    text: prefix,
                    style: Some(invisible_style),
                    is_tab: false,
+                    is_inlay,
                    replacement: renderer.clone(),
                });
            }
@@ -728,6 +733,7 @@ impl<'a> HighlightedChunk<'a> {
                text: remainder,
                style,
                is_tab,
+                is_inlay,
                replacement: renderer.clone(),
            })
        } else {
@@ -961,7 +967,10 @@ impl DisplaySnapshot {
            if chunk.is_unnecessary {
                diagnostic_highlight.fade_out = Some(editor_style.unnecessary_code_fade);
            }
-            if chunk.underline && editor_style.show_underlines {
+            if chunk.underline
+                && editor_style.show_underlines
+                && !(chunk.is_unnecessary && severity > lsp::DiagnosticSeverity::WARNING)
+            {
                let diagnostic_color = super::diagnostic_style(severity, &editor_style.status);
                diagnostic_highlight.underline = Some(UnderlineStyle {
                    color: Some(diagnostic_color),
@@ -981,6 +990,7 @@ impl DisplaySnapshot {
            text: chunk.text,
            style: highlight_style,
            is_tab: chunk.is_tab,
+            is_inlay: chunk.is_inlay,
            replacement: chunk.renderer.map(ChunkReplacement::Renderer),
        }
        .highlight_invisibles(editor_style)
@@ -2512,7 +2522,9 @@ pub mod tests {
        cx.update(|cx| syntax_chunks(DisplayRow(0)..DisplayRow(5), &map, &theme, cx)),
        [
            ("fn \n".to_string(), None),
-            ("oute\nr".to_string(), Some(Hsla::blue())),
+            ("oute".to_string(), Some(Hsla::blue())),
+            ("\n".to_string(), None),
+            ("r".to_string(), Some(Hsla::blue())),
            ("() \n{}\n\n".to_string(), None),
        ]
    );
@@ -2535,8 +2547,11 @@ pub mod tests {
        [
            ("out".to_string(), Some(Hsla::blue())),
            ("⋯\n".to_string(), None),
-            (" \nfn ".to_string(), Some(Hsla::red())),
-            ("i\n".to_string(), Some(Hsla::blue()))
+            (" ".to_string(), Some(Hsla::red())),
+            ("\n".to_string(), None),
+            ("fn ".to_string(), Some(Hsla::red())),
+            ("i".to_string(), Some(Hsla::blue())),
+            ("\n".to_string(), None)
        ]
    );
 }
@@ -1259,6 +1259,8 @@ pub struct Chunk<'a> {
    pub underline: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
+    /// Whether this chunk of text was originally a tab character.
+    pub is_inlay: bool,
    /// An optional recipe for how the chunk should be presented.
    pub renderer: Option<ChunkRenderer>,
 }
@@ -1424,6 +1426,7 @@ impl<'a> Iterator for FoldChunks<'a> {
    diagnostic_severity: chunk.diagnostic_severity,
    is_unnecessary: chunk.is_unnecessary,
    is_tab: chunk.is_tab,
+    is_inlay: chunk.is_inlay,
    underline: chunk.underline,
    renderer: None,
 });
@@ -336,6 +336,7 @@ impl<'a> Iterator for InlayChunks<'a> {
    Chunk {
        text: chunk,
        highlight_style,
+        is_inlay: true,
        ..Default::default()
    }
 }

@@ -933,7 +933,7 @@ impl<'a> Iterator for WrapChunks<'a> {
    self.transforms.next(&());
    return Some(Chunk {
        text: &display_text[start_ix..end_ix],
-        ..self.input_chunk.clone()
+        ..Default::default()
    });
 }
@@ -211,8 +211,11 @@ use workspace::{
    searchable::SearchEvent,
 };

-use crate::hover_links::{find_url, find_url_from_range};
 use crate::signature_help::{SignatureHelpHiddenBy, SignatureHelpState};
+use crate::{
+    code_context_menus::CompletionsMenuSource,
+    hover_links::{find_url, find_url_from_range},
+};

 pub const FILE_HEADER_HEIGHT: u32 = 2;
 pub const MULTI_BUFFER_EXCERPT_HEADER_HEIGHT: u32 = 1;
@@ -4510,30 +4513,40 @@ impl Editor {
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
-        let ignore_completion_provider = self
+        let completions_source = self
            .context_menu
            .borrow()
            .as_ref()
-            .map(|menu| match menu {
-                CodeContextMenu::Completions(completions_menu) => {
-                    completions_menu.ignore_completion_provider
-                }
-                CodeContextMenu::CodeActions(_) => false,
-            })
-            .unwrap_or(false);
+            .and_then(|menu| match menu {
+                CodeContextMenu::Completions(completions_menu) => Some(completions_menu.source),
+                CodeContextMenu::CodeActions(_) => None,
+            });

-        if ignore_completion_provider {
-            self.show_word_completions(&ShowWordCompletions, window, cx);
-        } else if self.is_completion_trigger(text, trigger_in_words, cx) {
-            self.show_completions(
-                &ShowCompletions {
-                    trigger: Some(text.to_owned()).filter(|x| !x.is_empty()),
-                },
-                window,
-                cx,
-            );
-        } else {
-            self.hide_context_menu(window, cx);
+        match completions_source {
+            Some(CompletionsMenuSource::Words) => {
+                self.show_word_completions(&ShowWordCompletions, window, cx)
+            }
+            Some(CompletionsMenuSource::Normal)
+            | Some(CompletionsMenuSource::SnippetChoices)
+            | None
+                if self.is_completion_trigger(
+                    text,
+                    trigger_in_words,
+                    completions_source.is_some(),
+                    cx,
+                ) =>
+            {
+                self.show_completions(
+                    &ShowCompletions {
+                        trigger: Some(text.to_owned()).filter(|x| !x.is_empty()),
+                    },
+                    window,
+                    cx,
+                )
+            }
+            _ => {
+                self.hide_context_menu(window, cx);
+            }
        }
    }

@@ -4541,6 +4554,7 @@ impl Editor {
        &self,
        text: &str,
        trigger_in_words: bool,
+        menu_is_open: bool,
        cx: &mut Context<Self>,
    ) -> bool {
        let position = self.selections.newest_anchor().head();
@@ -4558,6 +4572,7 @@ impl Editor {
            position.text_anchor,
            text,
            trigger_in_words,
+            menu_is_open,
            cx,
        )
    } else {
@@ -5008,7 +5023,7 @@ impl Editor {
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
-        self.open_or_update_completions_menu(true, None, window, cx);
+        self.open_or_update_completions_menu(Some(CompletionsMenuSource::Words), None, window, cx);
    }

    pub fn show_completions(
@@ -5017,12 +5032,12 @@ impl Editor {
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
-        self.open_or_update_completions_menu(false, options.trigger.as_deref(), window, cx);
+        self.open_or_update_completions_menu(None, options.trigger.as_deref(), window, cx);
    }

    fn open_or_update_completions_menu(
        &mut self,
-        ignore_completion_provider: bool,
+        requested_source: Option<CompletionsMenuSource>,
        trigger: Option<&str>,
        window: &mut Window,
        cx: &mut Context<Self>,
@@ -5047,10 +5062,13 @@ impl Editor {
        Self::completion_query(&self.buffer.read(cx).read(cx), position)
            .map(|query| query.into());

-        let provider = if ignore_completion_provider {
-            None
-        } else {
-            self.completion_provider.clone()
+        let provider = match requested_source {
+            Some(CompletionsMenuSource::Normal) | None => self.completion_provider.clone(),
+            Some(CompletionsMenuSource::Words) => None,
+            Some(CompletionsMenuSource::SnippetChoices) => {
+                log::error!("bug: SnippetChoices requested_source is not handled");
+                None
+            }
        };

        let sort_completions = provider
@@ -5106,14 +5124,15 @@ impl Editor {
            trigger_kind,
        };

-        let (replace_range, word_kind) = buffer_snapshot.surrounding_word(buffer_position);
-        let (replace_range, word_to_exclude) = if word_kind == Some(CharKind::Word) {
+        let (word_replace_range, word_to_exclude) = if let (word_range, Some(CharKind::Word)) =
+            buffer_snapshot.surrounding_word(buffer_position)
+        {
            let word_to_exclude = buffer_snapshot
-                .text_for_range(replace_range.clone())
+                .text_for_range(word_range.clone())
                .collect::<String>();
            (
-                buffer_snapshot.anchor_before(replace_range.start)
-                    ..buffer_snapshot.anchor_after(replace_range.end),
+                buffer_snapshot.anchor_before(word_range.start)
+                    ..buffer_snapshot.anchor_after(buffer_position),
                Some(word_to_exclude),
            )
        } else {
@@ -5221,7 +5240,7 @@ impl Editor {
            words.remove(&lsp_completion.new_text);
        }
        completions.extend(words.into_iter().map(|(word, word_range)| Completion {
-            replace_range: replace_range.clone(),
+            replace_range: word_replace_range.clone(),
            new_text: word.clone(),
            label: CodeLabel::plain(word, None),
            icon_path: None,
@@ -5245,9 +5264,9 @@ impl Editor {
            .map(|workspace| workspace.read(cx).app_state().languages.clone());
        let menu = CompletionsMenu::new(
            id,
+            requested_source.unwrap_or(CompletionsMenuSource::Normal),
            sort_completions,
            show_completion_documentation,
-            ignore_completion_provider,
            position,
            query.clone(),
            is_incomplete,
@@ -5531,14 +5550,12 @@ impl Editor {
        }
    }

-    let mut common_prefix_len = 0;
-    for (a, b) in old_text.chars().zip(new_text.chars()) {
-        if a == b {
-            common_prefix_len += a.len_utf8();
-        } else {
-            break;
-        }
-    }
+    let common_prefix_len = old_text
+        .chars()
+        .zip(new_text.chars())
+        .take_while(|(a, b)| a == b)
+        .map(|(a, _)| a.len_utf8())
+        .sum::<usize>();

    cx.emit(EditorEvent::InputHandled {
        utf16_range_to_replace: None,
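The common_prefix_len rewrite just above is behavior-preserving: both forms walk the two strings in lockstep and total the byte length (len_utf8) of the shared prefix, stopping at the first mismatching character. A standalone check of the iterator form:

fn common_prefix_len(old_text: &str, new_text: &str) -> usize {
    old_text
        .chars()
        .zip(new_text.chars())
        .take_while(|(a, b)| a == b)
        .map(|(a, _)| a.len_utf8())
        .sum()
}

fn main() {
    // "ï" is two bytes in UTF-8, so the shared prefix "naïve" counts 6 bytes.
    assert_eq!(common_prefix_len("naïve", "naïveté"), 6);
    assert_eq!(common_prefix_len("abc", "xyz"), 0);
}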
@@ -10856,14 +10873,54 @@ impl Editor {
    pub fn rewrap_impl(&mut self, options: RewrapOptions, cx: &mut Context<Self>) {
        let buffer = self.buffer.read(cx).snapshot(cx);
        let selections = self.selections.all::<Point>(cx);
-        let mut selections = selections.iter().peekable();
+
+        // Shrink and split selections to respect paragraph boundaries.
+        let ranges = selections.into_iter().flat_map(|selection| {
+            let language_settings = buffer.language_settings_at(selection.head(), cx);
+            let language_scope = buffer.language_scope_at(selection.head());
+
+            let Some(start_row) = (selection.start.row..=selection.end.row)
+                .find(|row| !buffer.is_line_blank(MultiBufferRow(*row)))
+            else {
+                return vec![];
+            };
+            let Some(end_row) = (selection.start.row..=selection.end.row)
+                .rev()
+                .find(|row| !buffer.is_line_blank(MultiBufferRow(*row)))
+            else {
+                return vec![];
+            };
+
+            let mut row = start_row;
+            let mut ranges = Vec::new();
+            while let Some(blank_row) =
+                (row..end_row).find(|row| buffer.is_line_blank(MultiBufferRow(*row)))
+            {
+                let next_paragraph_start = (blank_row + 1..=end_row)
+                    .find(|row| !buffer.is_line_blank(MultiBufferRow(*row)))
+                    .unwrap();
+                ranges.push((
+                    language_settings.clone(),
+                    language_scope.clone(),
+                    Point::new(row, 0)..Point::new(blank_row - 1, 0),
+                ));
+                row = next_paragraph_start;
+            }
+            ranges.push((
+                language_settings.clone(),
+                language_scope.clone(),
+                Point::new(row, 0)..Point::new(end_row, 0),
+            ));
+
+            ranges
+        });

        let mut edits = Vec::new();
        let mut rewrapped_row_ranges = Vec::<RangeInclusive<u32>>::new();

-        while let Some(selection) = selections.next() {
-            let mut start_row = selection.start.row;
-            let mut end_row = selection.end.row;
+        for (language_settings, language_scope, range) in ranges {
+            let mut start_row = range.start.row;
+            let mut end_row = range.end.row;

            // Skip selections that overlap with a range that has already been rewrapped.
            let selection_range = start_row..end_row;
@@ -10874,7 +10931,7 @@ impl Editor {
                continue;
            }

-            let tab_size = buffer.language_settings_at(selection.head(), cx).tab_size;
+            let tab_size = language_settings.tab_size;

            // Since not all lines in the selection may be at the same indent
            // level, choose the indent size that is the most common between all
@@ -10905,25 +10962,20 @@ impl Editor {
            let mut line_prefix = indent_size.chars().collect::<String>();

            let mut inside_comment = false;
-            if let Some(comment_prefix) =
-                buffer
-                    .language_scope_at(selection.head())
-                    .and_then(|language| {
-                        language
-                            .line_comment_prefixes()
-                            .iter()
-                            .find(|prefix| buffer.contains_str_at(indent_end, prefix))
-                            .cloned()
-                    })
-            {
+            if let Some(comment_prefix) = language_scope.and_then(|language| {
+                language
+                    .line_comment_prefixes()
+                    .iter()
+                    .find(|prefix| buffer.contains_str_at(indent_end, prefix))
+                    .cloned()
+            }) {
                line_prefix.push_str(&comment_prefix);
                inside_comment = true;
            }

-            let language_settings = buffer.language_settings_at(selection.head(), cx);
            let allow_rewrap_based_on_language = match language_settings.allow_rewrap {
                RewrapBehavior::InComments => inside_comment,
-                RewrapBehavior::InSelections => !selection.is_empty(),
+                RewrapBehavior::InSelections => !range.is_empty(),
                RewrapBehavior::Anywhere => true,
            };

@@ -10934,11 +10986,12 @@ impl Editor {
                continue;
            }

-            if selection.is_empty() {
+            if range.is_empty() {
                'expand_upwards: while start_row > 0 {
                    let prev_row = start_row - 1;
                    if buffer.contains_str_at(Point::new(prev_row, 0), &line_prefix)
                        && buffer.line_len(MultiBufferRow(prev_row)) as usize > line_prefix.len()
+                        && !buffer.is_line_blank(MultiBufferRow(prev_row))
                    {
                        start_row = prev_row;
                    } else {
@@ -10950,6 +11003,7 @@ impl Editor {
                let next_row = end_row + 1;
                if buffer.contains_str_at(Point::new(next_row, 0), &line_prefix)
                    && buffer.line_len(MultiBufferRow(next_row)) as usize > line_prefix.len()
+                    && !buffer.is_line_blank(MultiBufferRow(next_row))
                {
                    end_row = next_row;
                } else {
@@ -20294,6 +20348,7 @@ pub trait CompletionProvider {
        position: language::Anchor,
        text: &str,
        trigger_in_words: bool,
+        menu_is_open: bool,
        cx: &mut Context<Editor>,
    ) -> bool;

@@ -20611,6 +20666,7 @@ impl CompletionProvider for Entity<Project> {
        position: language::Anchor,
        text: &str,
        trigger_in_words: bool,
+        menu_is_open: bool,
        cx: &mut Context<Editor>,
    ) -> bool {
        let mut chars = text.chars();
@@ -20625,7 +20681,7 @@ impl CompletionProvider for Entity<Project> {

        let buffer = buffer.read(cx);
        let snapshot = buffer.snapshot();
-        if !snapshot.settings_at(position, cx).show_completions_on_input {
+        if !menu_is_open && !snapshot.settings_at(position, cx).show_completions_on_input {
            return false;
        }
        let classifier = snapshot.char_classifier_at(position).for_completion(true);
@@ -1912,19 +1912,19 @@ fn test_prev_next_word_boundary(cx: &mut TestAppContext) {
    assert_selection_ranges("use std::ˇstr::{foo, bar}\n\n {ˇbaz.qux()}", editor, cx);

    editor.move_to_previous_word_start(&MoveToPreviousWordStart, window, cx);
-    assert_selection_ranges("use stdˇ::str::{foo, bar}\n\n ˇ{baz.qux()}", editor, cx);
+    assert_selection_ranges("use stdˇ::str::{foo, bar}\n\nˇ {baz.qux()}", editor, cx);

    editor.move_to_previous_word_start(&MoveToPreviousWordStart, window, cx);
    assert_selection_ranges("use ˇstd::str::{foo, bar}\n\nˇ {baz.qux()}", editor, cx);

    editor.move_to_previous_word_start(&MoveToPreviousWordStart, window, cx);
-    assert_selection_ranges("ˇuse std::str::{foo, bar}\nˇ\n {baz.qux()}", editor, cx);
+    assert_selection_ranges("use ˇstd::str::{foo, bar}\nˇ\n {baz.qux()}", editor, cx);

    editor.move_to_previous_word_start(&MoveToPreviousWordStart, window, cx);
    assert_selection_ranges("ˇuse std::str::{foo, barˇ}\n\n {baz.qux()}", editor, cx);

    editor.move_to_previous_word_start(&MoveToPreviousWordStart, window, cx);
    assert_selection_ranges("ˇuse std::str::{foo, ˇbar}\n\n {baz.qux()}", editor, cx);

    editor.move_to_next_word_end(&MoveToNextWordEnd, window, cx);
-    assert_selection_ranges("useˇ std::str::{foo, bar}ˇ\n\n {baz.qux()}", editor, cx);
+    assert_selection_ranges("useˇ std::str::{foo, barˇ}\n\n {baz.qux()}", editor, cx);

    editor.move_to_next_word_end(&MoveToNextWordEnd, window, cx);
    assert_selection_ranges("use stdˇ::str::{foo, bar}\nˇ\n {baz.qux()}", editor, cx);
@@ -1942,7 +1942,7 @@ fn test_prev_next_word_boundary(cx: &mut TestAppContext) {

    editor.select_to_previous_word_start(&SelectToPreviousWordStart, window, cx);
    assert_selection_ranges(
-        "use std«ˇ::s»tr::{foo, bar}\n\n «ˇ{b»az.qux()}",
+        "use std«ˇ::s»tr::{foo, bar}\n\n«ˇ {b»az.qux()}",
        editor,
        cx,
    );
@@ -5111,7 +5111,7 @@ async fn test_rewrap(cx: &mut TestAppContext) {
        nisl venenatis tempus. Donec molestie blandit quam, et porta nunc laoreet in.
        Integer sit amet scelerisque nisi.
        "},
-        plaintext_language,
+        plaintext_language.clone(),
        &mut cx,
    );

@@ -5174,6 +5174,69 @@ async fn test_rewrap(cx: &mut TestAppContext) {
        &mut cx,
    );

+    assert_rewrap(
+        indoc! {"
+            «ˇone one one one one one one one one one one one one one one one one one one one one one one one one
+
+            two»
+
+            three
+
+            «ˇ\t
+
+            four four four four four four four four four four four four four four four four four four four four»
+
+            «ˇfive five five five five five five five five five five five five five five five five five five five
+            \t»
+            six six six six six six six six six six six six six six six six six six six six six six six six six
+        "},
+        indoc! {"
+            «ˇone one one one one one one one one one one one one one one one one one one one
+            one one one one one
+
+            two»
+
+            three
+
+            «ˇ\t
+
+            four four four four four four four four four four four four four four four four
+            four four four four»
+
+            «ˇfive five five five five five five five five five five five five five five five
+            five five five five
+            \t»
+            six six six six six six six six six six six six six six six six six six six six six six six six six
+        "},
+        plaintext_language.clone(),
+        &mut cx,
+    );
+
+    assert_rewrap(
+        indoc! {"
+            //ˇ long long long long long long long long long long long long long long long long long long long long long long long long long long long long
+            //ˇ
+            //ˇ long long long long long long long long long long long long long long long long long long long long long long long long long long long long
+            //ˇ short short short
+            int main(void) {
+                return 17;
+            }
+        "},
+        indoc! {"
+            //ˇ long long long long long long long long long long long long long long long
+            // long long long long long long long long long long long long long
+            //ˇ
+            //ˇ long long long long long long long long long long long long long long long
+            //ˇ long long long long long long long long long long long long long short short
+            // short
+            int main(void) {
+                return 17;
+            }
+        "},
+        language_with_c_comments,
+        &mut cx,
+    );
+
    #[track_caller]
    fn assert_rewrap(
        unwrapped_text: &str,
@@ -17860,6 +17923,7 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) {
            ("file-2".into(), "two\n".into()),
            ("file-3".into(), "three\n".into()),
        ],
+        "deadbeef",
    );

    let project = Project::test(fs, [path!("/test").as_ref()], cx).await;
@@ -21227,6 +21291,7 @@ fn empty_range(row: usize, column: usize) -> Range<DisplayPoint> {
    point..point
 }

+#[track_caller]
 fn assert_selection_ranges(marked_text: &str, editor: &mut Editor, cx: &mut Context<Editor>) {
    let (text, ranges) = marked_text_ranges(marked_text, true);
    assert_eq!(editor.text(cx), text);
@@ -6871,6 +6871,7 @@ impl LineWithInvisibles {
        text: "\n",
        style: None,
        is_tab: false,
+        is_inlay: false,
        replacement: None,
    }]) {
        if let Some(replacement) = highlighted_chunk.replacement {
@@ -7004,7 +7005,7 @@ impl LineWithInvisibles {
            strikethrough: text_style.strikethrough,
        });

-        if editor_mode.is_full() {
+        if editor_mode.is_full() && !highlighted_chunk.is_inlay {
            // Line wrap pads its contents with fake whitespaces,
            // avoid printing them
            let is_soft_wrapped = is_row_soft_wrapped(row);
@@ -264,7 +264,18 @@ pub fn previous_word_start(map: &DisplaySnapshot, point: DisplayPoint) -> Displa
    let raw_point = point.to_point(map);
    let classifier = map.buffer_snapshot.char_classifier_at(raw_point);

+    let mut is_first_iteration = true;
    find_preceding_boundary_display_point(map, point, FindRange::MultiLine, |left, right| {
+        // Make alt-left skip punctuation on Mac OS to respect Mac VSCode behaviour. For example: hello.| goes to |hello.
+        if is_first_iteration
+            && classifier.is_punctuation(right)
+            && !classifier.is_punctuation(left)
+        {
+            is_first_iteration = false;
+            return false;
+        }
+        is_first_iteration = false;
+
        (classifier.kind(left) != classifier.kind(right) && !classifier.is_whitespace(right))
            || left == '\n'
    })
@@ -305,8 +316,18 @@ pub fn previous_subword_start(map: &DisplaySnapshot, point: DisplayPoint) -> Dis
 pub fn next_word_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
    let raw_point = point.to_point(map);
    let classifier = map.buffer_snapshot.char_classifier_at(raw_point);

+    let mut is_first_iteration = true;
    find_boundary(map, point, FindRange::MultiLine, |left, right| {
+        // Make alt-right skip punctuation on Mac OS to respect the Mac behaviour. For example: |.hello goes to .hello|
+        if is_first_iteration
+            && classifier.is_punctuation(left)
+            && !classifier.is_punctuation(right)
+        {
+            is_first_iteration = false;
+            return false;
+        }
+        is_first_iteration = false;
+
        (classifier.kind(left) != classifier.kind(right) && !classifier.is_whitespace(left))
            || right == '\n'
    })
@@ -782,10 +803,15 @@ mod tests {

    fn assert(marked_text: &str, cx: &mut gpui::App) {
        let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
-        assert_eq!(
-            previous_word_start(&snapshot, display_points[1]),
-            display_points[0]
-        );
+        let actual = previous_word_start(&snapshot, display_points[1]);
+        let expected = display_points[0];
+        if actual != expected {
+            eprintln!(
+                "previous_word_start mismatch for '{}': actual={:?}, expected={:?}",
+                marked_text, actual, expected
+            );
+        }
+        assert_eq!(actual, expected);
    }

    assert("\nˇ ˇlorem", cx);
@@ -796,12 +822,17 @@ mod tests {
    assert("\nlorem\nˇ ˇipsum", cx);
    assert("\n\nˇ\nˇ", cx);
    assert(" ˇlorem ˇipsum", cx);
-    assert("loremˇ-ˇipsum", cx);
+    assert("ˇlorem-ˇipsum", cx);
+    assert("loremˇ-#$@ˇipsum", cx);
    assert("ˇlorem_ˇipsum", cx);
    assert(" ˇdefγˇ", cx);
    assert(" ˇbcΔˇ", cx);
    assert(" abˇ——ˇcd", cx);
+    // Test punctuation skipping behavior
+    assert("ˇhello.ˇ", cx);
+    assert("helloˇ...ˇ", cx);
+    assert("helloˇ.---..ˇtest", cx);
+    assert("test ˇ.--ˇtest", cx);
+    assert("oneˇ,;:!?ˇtwo", cx);
 }

 #[gpui::test]
@@ -955,10 +986,15 @@ mod tests {

    fn assert(marked_text: &str, cx: &mut gpui::App) {
        let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
-        assert_eq!(
-            next_word_end(&snapshot, display_points[0]),
-            display_points[1]
-        );
+        let actual = next_word_end(&snapshot, display_points[0]);
+        let expected = display_points[1];
+        if actual != expected {
+            eprintln!(
+                "next_word_end mismatch for '{}': actual={:?}, expected={:?}",
+                marked_text, actual, expected
+            );
+        }
+        assert_eq!(actual, expected);
    }

    assert("\nˇ loremˇ", cx);
@@ -967,11 +1003,18 @@ mod tests {
    assert(" loremˇ ˇ\nipsum\n", cx);
    assert("\nˇ\nˇ\n\n", cx);
    assert("loremˇ ipsumˇ ", cx);
-    assert("loremˇ-ˇipsum", cx);
+    assert("loremˇ-ipsumˇ", cx);
+    assert("loremˇ#$@-ˇipsum", cx);
    assert("loremˇ_ipsumˇ", cx);
    assert(" ˇbcΔˇ", cx);
    assert(" abˇ——ˇcd", cx);
+    // Test punctuation skipping behavior
+    assert("ˇ.helloˇ", cx);
+    assert("display_pointsˇ[0ˇ]", cx);
+    assert("ˇ...ˇhello", cx);
+    assert("helloˇ.---..ˇtest", cx);
+    assert("testˇ.--ˇ test", cx);
+    assert("oneˇ,;:!?ˇtwo", cx);
 }

 #[gpui::test]
@@ -45,6 +45,7 @@ pub fn test_font() -> Font {
}

// Returns a snapshot from text containing '|' character markers with the markers removed, and DisplayPoints for each one.
#[track_caller]
pub fn marked_display_snapshot(
    text: &str,
    cx: &mut gpui::App,
@@ -83,6 +84,7 @@ pub fn marked_display_snapshot(
    (snapshot, markers)
}

#[track_caller]
pub fn select_ranges(
    editor: &mut Editor,
    marked_text: &str,
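The marker convention described in that comment is what the movement tests above rely on (there with the `ˇ` character). A simplified, self-contained version of the marker-stripping idea, not Zed's actual `marked_display_snapshot`:

```rust
// Sketch of the marked-text convention: strip the marker character from the
// input and record the byte offsets where each marker occurred.
fn marked_offsets(marked_text: &str, marker: char) -> (String, Vec<usize>) {
    let mut text = String::new();
    let mut offsets = Vec::new();
    for ch in marked_text.chars() {
        if ch == marker {
            offsets.push(text.len()); // offset in the unmarked text
        } else {
            text.push(ch);
        }
    }
    (text, offsets)
}

fn main() {
    let (text, offsets) = marked_offsets("ˇlorem-ˇipsum", 'ˇ');
    assert_eq!(text, "lorem-ipsum");
    assert_eq!(offsets, vec![0, 6]);
}
```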
@@ -109,6 +109,7 @@ impl EditorTestContext {
        }
    }

    #[track_caller]
    pub fn new_multibuffer<const COUNT: usize>(
        cx: &mut gpui::TestAppContext,
        excerpts: [&str; COUNT],
@@ -303,6 +304,7 @@ impl EditorTestContext {
        fs.set_head_for_repo(
            &Self::root_path().join(".git"),
            &[(path.into(), diff_base.to_string())],
            "deadbeef",
        );
        self.cx.run_until_parked();
    }
@@ -351,6 +353,7 @@ impl EditorTestContext {
    /// editor state was needed to cause the failure.
    ///
    /// See the `util::test::marked_text_ranges` function for more information.
    #[track_caller]
    pub fn set_state(&mut self, marked_text: &str) -> ContextHandle {
        let state_context = self.add_assertion_context(format!(
            "Initial Editor State: \"{}\"",
@@ -367,6 +370,7 @@ impl EditorTestContext {
    }

    /// Only change the editor's selections
    #[track_caller]
    pub fn set_selections_state(&mut self, marked_text: &str) -> ContextHandle {
        let state_context = self.add_assertion_context(format!(
            "Initial Editor State: \"{}\"",
@@ -1,5 +1,3 @@
# Slide 2: Evals. This is our equivalent of swebench, but on our own codebase

url = "https://github.com/zed-industries/zed.git"
revision = "38fcadf9481d018543c65f36ac3bafeba190179b"
language_extension = "rs"

59
crates/eval/src/examples/grep_params_escapement.rs
Normal file
@@ -0,0 +1,59 @@
use agent_settings::AgentProfileId;
use anyhow::Result;
use assistant_tools::GrepToolInput;
use async_trait::async_trait;

use crate::example::{Example, ExampleContext, ExampleMetadata};

pub struct GrepParamsEscapementExample;

/*

This eval checks that the model doesn't use HTML escapement for characters like `<` and
`>` in tool parameters.

                       original    +system_prompt change    +tool description
    claude-opus-4      89%         92%                      97%+
    claude-sonnet-4    100%
    gpt-4.1-mini       100%
    gemini-2.5-pro     98%

*/

#[async_trait(?Send)]
impl Example for GrepParamsEscapementExample {
    fn meta(&self) -> ExampleMetadata {
        ExampleMetadata {
            name: "grep_params_escapement".to_string(),
            url: "https://github.com/octocat/hello-world".to_string(),
            revision: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d".to_string(),
            language_server: None,
            max_assertions: Some(1),
            profile_id: AgentProfileId::default(),
            existing_thread_json: None,
            max_turns: Some(2),
        }
    }

    async fn conversation(&self, cx: &mut ExampleContext) -> Result<()> {
        // cx.push_user_message("How does the precedence/specificity work with Keymap contexts? I am seeing that `MessageEditor > Editor` is lower precendence than `Editor` which is surprising to me, but might be how it works");
        cx.push_user_message("Search for files containing the characters `>` or `<`");
        let response = cx.run_turns(2).await?;
        let grep_input = response
            .find_tool_call("grep")
            .and_then(|tool_use| tool_use.parse_input::<GrepToolInput>().ok());

        cx.assert_some(grep_input.as_ref(), "`grep` tool should be called")?;

        cx.assert(
            !contains_html_entities(&grep_input.unwrap().regex),
            "Tool parameters should not be escaped",
        )
    }
}

fn contains_html_entities(pattern: &str) -> bool {
    regex::Regex::new(r"&[a-zA-Z]+;|&#[0-9]+;|&#x[0-9a-fA-F]+;")
        .unwrap()
        .is_match(pattern)
}
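To make the assertion concrete: the detector above flags named, decimal, and hex HTML character references inside the model's regex parameter. A quick standalone check, assuming only the `regex` crate the eval already depends on:

```rust
// Exercising the entity-detection pattern from contains_html_entities above:
// a correctly formed regex parameter passes, an HTML-escaped one is flagged.
fn main() {
    let detector = regex::Regex::new(r"&[a-zA-Z]+;|&#[0-9]+;|&#x[0-9a-fA-F]+;").unwrap();
    assert!(!detector.is_match(r"[<>]"));       // what the model should send
    assert!(detector.is_match(r"[&lt;&gt;]"));  // named references: &lt; and &gt;
    assert!(detector.is_match(r"&#60;|&#62;")); // decimal character references
}
```

The pattern stays deliberately narrow: a bare `&` or `&&` in a legitimate regex does not trip it, since a match requires the full `&...;` reference shape.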
@@ -16,6 +16,7 @@ mod add_arg_to_trait_method;
mod code_block_citations;
mod comment_translation;
mod file_search;
mod grep_params_escapement;
mod overwrite_file;
mod planets;

@@ -27,6 +28,7 @@ pub fn all(examples_dir: &Path) -> Vec<Rc<dyn Example>> {
        Rc::new(planets::Planets),
        Rc::new(comment_translation::CommentTranslation),
        Rc::new(overwrite_file::FileOverwriteExample),
        Rc::new(grep_params_escapement::GrepParamsEscapementExample),
    ];

    for example_path in list_declarative_examples(examples_dir).unwrap() {
@@ -101,7 +101,10 @@ pub fn init(cx: &mut App) {
            directories: true,
            multiple: false,
        },
        DirectoryLister::Local(workspace.app_state().fs.clone()),
        DirectoryLister::Local(
            workspace.project().clone(),
            workspace.app_state().fs.clone(),
        ),
        window,
        cx,
    );
@@ -4,7 +4,6 @@ mod file_finder_tests;
mod open_path_prompt_tests;

pub mod file_finder_settings;
mod new_path_prompt;
mod open_path_prompt;

use futures::future::join_all;
@@ -20,7 +19,6 @@ use gpui::{
    KeyContext, Modifiers, ModifiersChangedEvent, ParentElement, Render, Styled, Task, WeakEntity,
    Window, actions,
};
use new_path_prompt::NewPathPrompt;
use open_path_prompt::OpenPathPrompt;
use picker::{Picker, PickerDelegate};
use project::{PathMatchCandidateSet, Project, ProjectPath, WorktreeId};
@@ -85,8 +83,8 @@ pub fn init_settings(cx: &mut App) {
pub fn init(cx: &mut App) {
    init_settings(cx);
    cx.observe_new(FileFinder::register).detach();
    cx.observe_new(NewPathPrompt::register).detach();
    cx.observe_new(OpenPathPrompt::register).detach();
    cx.observe_new(OpenPathPrompt::register_new_path).detach();
}

impl FileFinder {
@@ -332,6 +330,7 @@ impl FileFinder {
                worktree_id: WorktreeId::from_usize(m.0.worktree_id),
                path: m.0.path.clone(),
            },
            Match::CreateNew(p) => p.clone(),
        };
        let open_task = workspace.update(cx, move |workspace, cx| {
            workspace.split_path_preview(path, false, Some(split_direction), window, cx)
@@ -456,13 +455,15 @@ enum Match {
        panel_match: Option<ProjectPanelOrdMatch>,
    },
    Search(ProjectPanelOrdMatch),
    CreateNew(ProjectPath),
}

impl Match {
    fn path(&self) -> &Arc<Path> {
    fn path(&self) -> Option<&Arc<Path>> {
        match self {
            Match::History { path, .. } => &path.project.path,
            Match::Search(panel_match) => &panel_match.0.path,
            Match::History { path, .. } => Some(&path.project.path),
            Match::Search(panel_match) => Some(&panel_match.0.path),
            Match::CreateNew(_) => None,
        }
    }
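Widening `path()` to `Option` is the hinge of this change: a `CreateNew` entry names a file that does not exist yet, so every caller must now handle the `None` case explicitly. A standalone sketch of the same refactor shape; `FinderMatch` and `existing_path` are illustrative names, not the finder's actual types:

```rust
use std::path::{Path, PathBuf};

// Simplified stand-in for the finder's match type: the new `CreateNew`
// variant carries a path, but not one that exists on disk yet, so the
// "existing path" accessor becomes fallible.
enum FinderMatch {
    Existing(PathBuf),
    CreateNew(PathBuf),
}

impl FinderMatch {
    fn existing_path(&self) -> Option<&Path> {
        match self {
            FinderMatch::Existing(p) => Some(p),
            FinderMatch::CreateNew(_) => None,
        }
    }
}

fn main() {
    let matches = [
        FinderMatch::Existing(PathBuf::from("src/main.rs")),
        FinderMatch::CreateNew(PathBuf::from("src/new_file.rs")),
    ];
    // Call sites now compare against the Option instead of dereferencing,
    // mirroring the `.position(|m| match m.path() { .. })` change below.
    let position = matches
        .iter()
        .position(|m| m.existing_path() == Some(Path::new("src/main.rs")));
    assert_eq!(position, Some(0));
}
```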
@@ -470,6 +471,7 @@ impl Match {
        match self {
            Match::History { panel_match, .. } => panel_match.as_ref(),
            Match::Search(panel_match) => Some(&panel_match),
            Match::CreateNew(_) => None,
        }
    }
}
@@ -499,7 +501,10 @@ impl Matches {
            // reason for the matches set to change.
            self.matches
                .iter()
                .position(|m| path.project.path == *m.path())
                .position(|m| match m.path() {
                    Some(p) => path.project.path == *p,
                    None => false,
                })
                .ok_or(0)
        } else {
            self.matches.binary_search_by(|m| {
@@ -576,6 +581,12 @@ impl Matches {
        a: &Match,
        b: &Match,
    ) -> cmp::Ordering {
        // Handle CreateNew variant - always put it at the end
        match (a, b) {
            (Match::CreateNew(_), _) => return cmp::Ordering::Less,
            (_, Match::CreateNew(_)) => return cmp::Ordering::Greater,
            _ => {}
        }
        debug_assert!(a.panel_match().is_some() && b.panel_match().is_some());

        match (&a, &b) {
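The early `match` pins the `CreateNew` sentinel to one end before any score comparison runs; since the finder presents matches best-first, comparing `CreateNew` as `Less` than every real match sinks it to the bottom of the list. A toy version of the pattern (`Entry` and `cmp_entries` are illustrative, and the sketch assumes a best-first, descending sort, which is an inference about how `Matches` keeps its vector ordered):

```rust
use std::cmp::Ordering;

// Toy comparator: real entries order by score, and the sentinel
// `CreateNew` entry always compares as the worst match.
enum Entry {
    Scored(u32),
    CreateNew,
}

fn cmp_entries(a: &Entry, b: &Entry) -> Ordering {
    match (a, b) {
        (Entry::CreateNew, Entry::CreateNew) => Ordering::Equal,
        (Entry::CreateNew, _) => Ordering::Less,
        (_, Entry::CreateNew) => Ordering::Greater,
        (Entry::Scored(x), Entry::Scored(y)) => x.cmp(y),
    }
}

fn main() {
    let mut entries = vec![Entry::Scored(10), Entry::CreateNew, Entry::Scored(90)];
    // Best-first (descending) sort: reversing the comparator arguments
    // leaves `CreateNew` at the end of the list.
    entries.sort_by(|a, b| cmp_entries(b, a));
    assert!(matches!(entries.first(), Some(Entry::Scored(90))));
    assert!(matches!(entries.last(), Some(Entry::CreateNew)));
}
```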
@@ -908,6 +919,23 @@ impl FileFinderDelegate {
            matches.into_iter(),
            extend_old_matches,
        );
        let worktree = self.project.read(cx).visible_worktrees(cx).next();
        let filename = query.raw_query.to_string();
        let path = Path::new(&filename);

        // add option of creating new file only if path is relative
        if let Some(worktree) = worktree {
            let worktree = worktree.read(cx);
            if path.is_relative()
                && worktree.entry_for_path(&path).is_none()
                && !filename.ends_with("/")
            {
                self.matches.matches.push(Match::CreateNew(ProjectPath {
                    worktree_id: worktree.id(),
                    path: Arc::from(path),
                }));
            }
        }

        self.selected_index = selected_match.map_or_else(
            || self.calculate_selected_index(cx),
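The three guards above decide when a "create new file" row is appended: the query must be a relative path, must not name an entry that already exists, and must not end in a slash. Condensed into a standalone predicate; `should_offer_create_new` and `entry_exists` are illustrative stand-ins for the worktree lookup (`worktree.entry_for_path(..)` in the diff):

```rust
use std::path::Path;

// Condensed form of the CreateNew guards from the hunk above.
fn should_offer_create_new(query: &str, entry_exists: impl Fn(&Path) -> bool) -> bool {
    let path = Path::new(query);
    path.is_relative()           // absolute paths never get a CreateNew entry
        && !entry_exists(path)   // nor do files that already exist
        && !query.ends_with('/') // nor queries that name a directory
}

fn main() {
    let existing = |p: &Path| p == Path::new("src/main.rs");
    assert!(should_offer_create_new("src/new_file.rs", existing));
    assert!(!should_offer_create_new("src/main.rs", existing)); // already exists
    assert!(!should_offer_create_new("/etc/hosts", existing));  // absolute
    assert!(!should_offer_create_new("src/", existing));        // trailing slash
}
```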
@@ -988,6 +1016,12 @@ impl FileFinderDelegate {
                }
            }
            Match::Search(path_match) => self.labels_for_path_match(&path_match.0),
            Match::CreateNew(project_path) => (
                format!("Create file: {}", project_path.path.display()),
                vec![],
                String::from(""),
                vec![],
            ),
        };

        if file_name_positions.is_empty() {
@@ -1372,6 +1406,29 @@ impl PickerDelegate for FileFinderDelegate {
            }
        };
        match &m {
            Match::CreateNew(project_path) => {
                // Create a new file with the given filename
                if secondary {
                    workspace.split_path_preview(
                        project_path.clone(),
                        false,
                        None,
                        window,
                        cx,
                    )
                } else {
                    workspace.open_path_preview(
                        project_path.clone(),
                        None,
                        true,
                        false,
                        true,
                        window,
                        cx,
                    )
                }
            }

            Match::History { path, .. } => {
                let worktree_id = path.project.worktree_id;
                if workspace
@@ -1502,6 +1559,10 @@ impl PickerDelegate for FileFinderDelegate {
                .flex_none()
                .size(IconSize::Small.rems())
                .into_any_element(),
            Match::CreateNew(_) => Icon::new(IconName::Plus)
                .color(Color::Muted)
                .size(IconSize::Small)
                .into_any_element(),
        };
        let (file_name_label, full_path_label) = self.labels_for_match(path_match, window, cx, ix);

@@ -1509,7 +1570,7 @@ impl PickerDelegate for FileFinderDelegate {
            if !settings.file_icons {
                return None;
            }
            let file_name = path_match.path().file_name()?;
            let file_name = path_match.path()?.file_name()?;
            let icon = FileIcons::get_icon(file_name.as_ref(), cx)?;
            Some(Icon::from_path(icon).color(Color::Muted))
        });
@@ -196,7 +196,7 @@ async fn test_matching_paths(cx: &mut TestAppContext) {

    cx.simulate_input("bna");
    picker.update(cx, |picker, _| {
        assert_eq!(picker.delegate.matches.len(), 2);
        assert_eq!(picker.delegate.matches.len(), 3);
    });
    cx.dispatch_action(SelectNext);
    cx.dispatch_action(Confirm);
@@ -229,7 +229,12 @@ async fn test_matching_paths(cx: &mut TestAppContext) {
    picker.update(cx, |picker, _| {
        assert_eq!(
            picker.delegate.matches.len(),
            1,
            // existence of CreateNew option depends on whether path already exists
            if bandana_query == util::separator!("a/bandana") {
                1
            } else {
                2
            },
            "Wrong number of matches for bandana query '{bandana_query}'. Matches: {:?}",
            picker.delegate.matches
        );
@@ -269,9 +274,9 @@ async fn test_unicode_paths(cx: &mut TestAppContext) {

    cx.simulate_input("g");
    picker.update(cx, |picker, _| {
        assert_eq!(picker.delegate.matches.len(), 1);
        assert_eq!(picker.delegate.matches.len(), 2);
        assert_match_at_position(picker, 1, "g");
    });
    cx.dispatch_action(SelectNext);
    cx.dispatch_action(Confirm);
    cx.read(|cx| {
        let active_editor = workspace.read(cx).active_item_as::<Editor>(cx).unwrap();
@@ -365,13 +370,12 @@ async fn test_complex_path(cx: &mut TestAppContext) {

    cx.simulate_input("t");
    picker.update(cx, |picker, _| {
        assert_eq!(picker.delegate.matches.len(), 1);
        assert_eq!(picker.delegate.matches.len(), 2);
        assert_eq!(
            collect_search_matches(picker).search_paths_only(),
            vec![PathBuf::from("其他/S数据表格/task.xlsx")],
        )
    });
    cx.dispatch_action(SelectNext);
    cx.dispatch_action(Confirm);
    cx.read(|cx| {
        let active_editor = workspace.read(cx).active_item_as::<Editor>(cx).unwrap();
@@ -416,8 +420,9 @@ async fn test_row_column_numbers_query_inside_file(cx: &mut TestAppContext) {
        })
        .await;
    picker.update(cx, |finder, _| {
        assert_match_at_position(finder, 1, &query_inside_file.to_string());
        let finder = &finder.delegate;
        assert_eq!(finder.matches.len(), 1);
        assert_eq!(finder.matches.len(), 2);
        let latest_search_query = finder
            .latest_search_query
            .as_ref()
@@ -431,7 +436,6 @@ async fn test_row_column_numbers_query_inside_file(cx: &mut TestAppContext) {
        );
    });

    cx.dispatch_action(SelectNext);
    cx.dispatch_action(Confirm);

    let editor = cx.update(|_, cx| workspace.read(cx).active_item_as::<Editor>(cx).unwrap());
@@ -491,8 +495,9 @@ async fn test_row_column_numbers_query_outside_file(cx: &mut TestAppContext) {
        })
        .await;
    picker.update(cx, |finder, _| {
        assert_match_at_position(finder, 1, &query_outside_file.to_string());
        let delegate = &finder.delegate;
        assert_eq!(delegate.matches.len(), 1);
        assert_eq!(delegate.matches.len(), 2);
        let latest_search_query = delegate
            .latest_search_query
            .as_ref()
@@ -506,7 +511,6 @@ async fn test_row_column_numbers_query_outside_file(cx: &mut TestAppContext) {
        );
    });

    cx.dispatch_action(SelectNext);
    cx.dispatch_action(Confirm);

    let editor = cx.update(|_, cx| workspace.read(cx).active_item_as::<Editor>(cx).unwrap());
@@ -561,7 +565,8 @@ async fn test_matching_cancellation(cx: &mut TestAppContext) {
        .await;

    picker.update(cx, |picker, _cx| {
        assert_eq!(picker.delegate.matches.len(), 5)
        // CreateNew option not shown in this case since file already exists
        assert_eq!(picker.delegate.matches.len(), 5);
    });

    picker.update_in(cx, |picker, window, cx| {
@@ -959,7 +964,8 @@ async fn test_search_worktree_without_files(cx: &mut TestAppContext) {
        .await;
    cx.read(|cx| {
        let finder = picker.read(cx);
        assert_eq!(finder.delegate.matches.len(), 0);
        assert_eq!(finder.delegate.matches.len(), 1);
        assert_match_at_position(finder, 0, "dir");
    });
}

@@ -1518,12 +1524,13 @@ async fn test_keep_opened_file_on_top_of_search_results_and_select_next_one(
        })
        .await;
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 5);
        assert_eq!(finder.delegate.matches.len(), 6);
        assert_match_at_position(finder, 0, "main.rs");
        assert_match_selection(finder, 1, "bar.rs");
        assert_match_at_position(finder, 2, "lib.rs");
        assert_match_at_position(finder, 3, "moo.rs");
        assert_match_at_position(finder, 4, "maaa.rs");
        assert_match_at_position(finder, 5, ".rs");
    });

    // main.rs is not among matches, select top item
@@ -1533,9 +1540,10 @@ async fn test_keep_opened_file_on_top_of_search_results_and_select_next_one(
        })
        .await;
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 2);
        assert_eq!(finder.delegate.matches.len(), 3);
        assert_match_at_position(finder, 0, "bar.rs");
        assert_match_at_position(finder, 1, "lib.rs");
        assert_match_at_position(finder, 2, "b");
    });

    // main.rs is back, put it on top and select next item
@@ -1545,10 +1553,11 @@ async fn test_keep_opened_file_on_top_of_search_results_and_select_next_one(
        })
        .await;
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 3);
        assert_eq!(finder.delegate.matches.len(), 4);
        assert_match_at_position(finder, 0, "main.rs");
        assert_match_selection(finder, 1, "moo.rs");
        assert_match_at_position(finder, 2, "maaa.rs");
        assert_match_at_position(finder, 3, "m");
    });

    // get back to the initial state
@@ -1623,12 +1632,13 @@ async fn test_setting_auto_select_first_and_select_active_file(cx: &mut TestAppContext) {
        })
        .await;
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 5);
        assert_eq!(finder.delegate.matches.len(), 6);
        assert_match_selection(finder, 0, "main.rs");
        assert_match_at_position(finder, 1, "bar.rs");
        assert_match_at_position(finder, 2, "lib.rs");
        assert_match_at_position(finder, 3, "moo.rs");
        assert_match_at_position(finder, 4, "maaa.rs");
        assert_match_at_position(finder, 5, ".rs");
    });
}

@@ -1679,12 +1689,13 @@ async fn test_non_separate_history_items(cx: &mut TestAppContext) {
        })
        .await;
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 5);
        assert_eq!(finder.delegate.matches.len(), 6);
        assert_match_at_position(finder, 0, "main.rs");
        assert_match_selection(finder, 1, "moo.rs");
        assert_match_at_position(finder, 2, "bar.rs");
        assert_match_at_position(finder, 3, "lib.rs");
        assert_match_at_position(finder, 4, "maaa.rs");
        assert_match_at_position(finder, 5, ".rs");
    });

    // main.rs is not among matches, select top item
@@ -1694,9 +1705,10 @@ async fn test_non_separate_history_items(cx: &mut TestAppContext) {
        })
        .await;
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 2);
        assert_eq!(finder.delegate.matches.len(), 3);
        assert_match_at_position(finder, 0, "bar.rs");
        assert_match_at_position(finder, 1, "lib.rs");
        assert_match_at_position(finder, 2, "b");
    });

    // main.rs is back, put it on top and select next item
@@ -1706,10 +1718,11 @@ async fn test_non_separate_history_items(cx: &mut TestAppContext) {
        })
        .await;
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 3);
        assert_eq!(finder.delegate.matches.len(), 4);
        assert_match_at_position(finder, 0, "main.rs");
        assert_match_selection(finder, 1, "moo.rs");
        assert_match_at_position(finder, 2, "maaa.rs");
        assert_match_at_position(finder, 3, "m");
    });

    // get back to the initial state
@@ -1965,9 +1978,10 @@ async fn test_search_results_refreshed_on_worktree_updates(cx: &mut gpui::TestAppContext) {
    let picker = open_file_picker(&workspace, cx);
    cx.simulate_input("rs");
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 2);
        assert_eq!(finder.delegate.matches.len(), 3);
        assert_match_at_position(finder, 0, "lib.rs");
        assert_match_at_position(finder, 1, "main.rs");
        assert_match_at_position(finder, 2, "rs");
    });

    // Delete main.rs
@@ -1980,8 +1994,9 @@ async fn test_search_results_refreshed_on_worktree_updates(cx: &mut gpui::TestAppContext) {

    // main.rs is not among search results anymore
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 1);
        assert_eq!(finder.delegate.matches.len(), 2);
        assert_match_at_position(finder, 0, "lib.rs");
        assert_match_at_position(finder, 1, "rs");
    });

    // Create util.rs
@@ -1994,9 +2009,10 @@ async fn test_search_results_refreshed_on_worktree_updates(cx: &mut gpui::TestAppContext) {

    // util.rs is among search results
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 2);
        assert_eq!(finder.delegate.matches.len(), 3);
        assert_match_at_position(finder, 0, "lib.rs");
        assert_match_at_position(finder, 1, "util.rs");
        assert_match_at_position(finder, 2, "rs");
    });
}

@@ -2036,9 +2052,10 @@ async fn test_search_results_refreshed_on_adding_and_removing_worktrees(
    let picker = open_file_picker(&workspace, cx);
    cx.simulate_input("rs");
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 2);
        assert_eq!(finder.delegate.matches.len(), 3);
        assert_match_at_position(finder, 0, "bar.rs");
        assert_match_at_position(finder, 1, "lib.rs");
        assert_match_at_position(finder, 2, "rs");
    });

    // Add new worktree
@@ -2054,10 +2071,11 @@ async fn test_search_results_refreshed_on_adding_and_removing_worktrees(

    // main.rs is among search results
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 3);
        assert_eq!(finder.delegate.matches.len(), 4);
        assert_match_at_position(finder, 0, "bar.rs");
        assert_match_at_position(finder, 1, "lib.rs");
        assert_match_at_position(finder, 2, "main.rs");
        assert_match_at_position(finder, 3, "rs");
    });

    // Remove the first worktree
@@ -2068,8 +2086,9 @@ async fn test_search_results_refreshed_on_adding_and_removing_worktrees(

    // Files from the first worktree are not in the search results anymore
    picker.update(cx, |finder, _| {
        assert_eq!(finder.delegate.matches.len(), 1);
        assert_eq!(finder.delegate.matches.len(), 2);
        assert_match_at_position(finder, 0, "main.rs");
        assert_match_at_position(finder, 1, "rs");
    });
}

@@ -2414,7 +2433,7 @@ async fn test_repeat_toggle_action(cx: &mut gpui::TestAppContext) {
    cx.run_until_parked();

    picker.update(cx, |picker, _| {
        assert_eq!(picker.delegate.matches.len(), 6);
        assert_eq!(picker.delegate.matches.len(), 7);
        assert_eq!(picker.delegate.selected_index, 0);
    });

@@ -2426,7 +2445,7 @@ async fn test_repeat_toggle_action(cx: &mut gpui::TestAppContext) {
    cx.run_until_parked();

    picker.update(cx, |picker, _| {
        assert_eq!(picker.delegate.matches.len(), 6);
        assert_eq!(picker.delegate.matches.len(), 7);
        assert_eq!(picker.delegate.selected_index, 3);
    });
}
@@ -2468,7 +2487,7 @@ async fn open_queried_buffer(
    let history_items = picker.update(cx, |finder, _| {
        assert_eq!(
            finder.delegate.matches.len(),
            expected_matches,
            expected_matches + 1, // +1 from CreateNew option
            "Unexpected number of matches found for query `{input}`, matches: {:?}",
            finder.delegate.matches
        );
@@ -2617,6 +2636,7 @@ fn collect_search_matches(picker: &Picker<FileFinderDelegate>) -> SearchEntries
                    .push(Path::new(path_match.0.path_prefix.as_ref()).join(&path_match.0.path));
                search_entries.search_matches.push(path_match.0.clone());
            }
            Match::CreateNew(_) => {}
        }
    }
    search_entries
@@ -2650,6 +2670,7 @@ fn assert_match_at_position(
    let match_file_name = match &match_item {
        Match::History { path, .. } => path.absolute.as_deref().unwrap().file_name(),
        Match::Search(path_match) => path_match.0.path.file_name(),
        Match::CreateNew(project_path) => project_path.path.file_name(),
    }
    .unwrap()
    .to_string_lossy();
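A pattern worth noting across these test updates: because `Match` is matched exhaustively, adding the `CreateNew` variant forced an explicit new arm at every site (`collect_search_matches`, `assert_match_at_position`, and the delegate methods earlier), which is why so many files in this diff change by exactly one line. A toy illustration of why the compiler surfaces each decision point; the enum and function here are illustrative, not the finder's types:

```rust
// Adding a variant to an exhaustively matched enum makes every match site a
// compile error until it chooses a policy for the new case.
enum Match {
    Search(String),
    CreateNew(String),
}

fn file_name(m: &Match) -> Option<&str> {
    match m {
        Match::Search(path) => path.rsplit('/').next(),
        // Required once `CreateNew` exists; without this arm the compiler
        // rejects the match as non-exhaustive.
        Match::CreateNew(path) => path.rsplit('/').next(),
    }
}

fn main() {
    assert_eq!(file_name(&Match::CreateNew("src/new.rs".into())), Some("new.rs"));
}
```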