Compare commits
329 Commits
Commit SHA1s:
2eac6a9222, 7c3cffdc52, 5a3186b659, caf54844de, 745ebe2313, 5c95e942e6, f979f24bfa, 411b9abb9e, 81d4d48ef2, cd9284761a,
34f9eef879, 23cf6bf268, c97e477eb1, 16804a81cc, 8bf39bf768, 75922e8fcd, 2eb83364ae, 5d22585ef5, 71303fa18b, 5753b978a0,
9cf2490ed7, 28ea3ea529, 2dad48d8d9, 16853acbb1, 64d649245c, 08210b512d, 6070aea6c0, 16b44d53f9, 3bed830a1f, ee2a329981,
6d64058fc6, 7c2822a020, 3db00384f4, 3dfbd9e57c, b103d7621b, ab70e524c8, f0ce62ead8, f0345df479, bbd2262a93, c4fd9e1a6b,
0b7583bae5, e4bd115a63, fa54fa80d0, de16f2bbe6, e3b13b54c9, 2c5d2a58d8, 3485b7704b, 6801b9137f, 3853e83da7, 047a7f5d29,
8332e60ca9, afab4b522e, 0cb7dd2972, 387281fa5b, 72bcb0beb7, 4ff41ba62e, 16e901fb8f, 54b4587f9a, 1fe10117b7, 78fd2685d5,
da9e958b15, 3908ca9744, 6fe58a2c4e, 79e7ccc1fe, 0bc9478b46, 4ac7935589, c75ad2fd11, 365997d79d, c57a6263aa, ebea734515,
4fe05530b0, b15aef4310, 23adff6ff2, 866fe427b3, f7b2faf64f, 5187954711, cabd22f36b, 1552198b55, 0da97b0c8b, 037df8cec5,
05ac9f1f84, 9ffb3c5176, 72d787b3ae, 104f601413, 46773ebbd8, 65a93a0036, dc63138089, fa02bd71c3, 6d95fd9167, 899153d9a4,
77ead25f8c, 9e5f89dc26, cf5e76b1b9, 9775747ba9, b7c2d4876c, 2fe1293fba, e0057ccd0f, 51585e770d, 52fa7ababb, 5ad51ca48e,
35a119d573, bbf3b20fc3, c1b997002a, 69e99b9f2f, 0fc85a020a, 974f724151, ca3f46588a, d9efa2860f, ac806d982b, 73cd6ef92c,
019a14bcde, 95d78ff8d5, 454adfacae, edd40566b7, a40ee74a1f, 2e883be4b5, 6ea4d2b30d, 380d8c5662, 508b604b67, 3da1de2a48,
8837e5564d, 709523bf36, 7afee64119, cd0ef4b982, be6f29cc28, 38b8e6549f, b8c1b54f9e, c304e964fe, 53abad5979, 54e64b2407,
ce8854007f, 3e8565ac25, d801b7b12e, 37fa42d5cc, 5c9b8e8321, 96609151c6, e37c78bde7, 920ca688a7, f62d76159b, 6a8fdbfd62,
711a9e5753, 6de5d29bff, d7015e5b8f, ddf70b3bb8, 8bd8435887, 4b297a9967, 7aa70a4858, 04cd3fcd23, d15d85830a, ccc173ebb1,
03a030fd00, 894f3b9d15, f36143a461, 8730d317a8, 783b33b5c9, 28da99cc06, d2c265c71f, bbd431ae8c, 5b9d3ea097, 738cfdff84,
32d5a2cca0, dda614091a, fa9da6ad5b, d082cfdbec, c71791d64e, 244d8517f1, 3884de937b, 8af984ae70, 9d533f9d30, 274a40b7e0,
9c7b1d19ce, 3d9881121f, a2e98e9f0e, 7c64737e00, 8191a5339d, 17c3b741ec, 52770cd3ad, 4ac67ac5ae, 8c1b549683, ff6ac60bad,
f8ab51307a, 0a2186c87b, c3653f4cb1, 8b28941c14, aefb798090, 2c5aa5891d, 7d54d9f45e, cde47e60cd, 79f96a5afe, 81058ee172,
89743117c6, 6de37fa57c, beb0d49dc4, c9aadadc4b, bcd182f480, 3987b60738, 827103908e, 8e9e3ba1a5, 676ed8fb8a, 4304521655,
04716a0e4a, 5e38915d45, f9257b0efe, 5d0c96872b, 071e684be4, 2280594408, 09a1d51e9a, ac15194d11, 988d834c33, 48eacf3f2a,
030d4d2631, 10df7b5eb9, 55120c4231, 8227c45a11, d23359e19a, 936ad0bf10, faa0bb51c9, 2db2271e3c, 79b1dd7db8, 81f8e2ed4a,
b9256dd469, 27d3da678c, 03357f3f7b, 4aabba6cf6, 8c46a4f594, 522abe8e59, 5ae8c4cf09, d8195a8fd7, 2645591cd5, 526a7c0702,
e793740168, dea0a58727, b7abc9d493, 01a77bb231, de225fd242, 1bc052d76b, 29cb95a3ca, 1307b81721, 203754d0db, c9c603b1d1,
e13b494c9e, c0397727e0, 9c2b90fb8f, d108e5f53c, 2551bde1d3, e7de80c6ae, ae210eced8, a9d99d8347, 3e6435eddc, 9e75871d48,
707a4c7f20, 854076f96d, cf931247d0, b74477d12e, 3077abf9cf, 07dab4e94a, 59686f1f44, a60bea8a3d, b820aa1fcd, 55d91bce53,
b798392050, 657c8b1084, 2bb8aa2f73, beeb42da29, 6d66ff1d95, e0b818af62, 58a400b1ee, 8ab7d44d51, 56d4c0af9f, feeda7fa37,
4a5c55a8f2, 7c1ae9bcc3, 6f97da3435, 63c1033448, b16911e756, b14401f817, 17cf865d1e, b7ec437b13, f1aab1120d, 3f90bc81bd,
9d5fb3c3f3, 864767ad35, ec69b68e72, 9dd18e5ee1, 2ebe16a52f, 1ed4647203, ebed567adb, a6544c70c5, b363e1a482, 65e3e84cbc,
1e1d4430c2, c874f1fa9d, 9a9e96ed5a, 8c46e290df, aacbb9c2f4, f90333f92e, b24f614ca3, cefa0cbed8, 3fb1023667, 9c715b470e,
ae219e9e99, 6d99c12796, 8fb7fa941a, 22d75b798e, 06a199da4d, ab6125ddde, d3bc561f26, f13f2dfb70, 24e4446cd3, cc536655a1,
2a9e73c65d, 4f1728e5ee, 40c91d5df0, fe1b36671d, bb9e2b0403, 4f8d7f0a6b, caf3d30bf6, df0cf22347, a305eda8d1, ba7b1db054,
019c8ded77, 1704dbea7e, eefa6c4882, 1f17df7fb0, 6d687a2c2c, 32214abb64, a78563b80b, f881cacd8a, a539a38f13
.github/ISSUE_TEMPLATE/01_bug_ai.yml (vendored, 9 changed lines)

@@ -1,4 +1,4 @@
name: Bug Report (AI Related)
name: Bug Report (AI)
description: Zed Agent Panel Bugs
type: "Bug"
labels: ["ai"]

@@ -19,15 +19,14 @@ body:
2.
3.

Actual Behavior:
Expected Behavior:
**Expected Behavior**:
**Actual Behavior**:

### Model Provider Details
- Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc)
- Model Name:
- Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads)
- MCP Servers in-use:
- Other Details:
- Other Details (MCPs, other settings, etc):
validations:
required: true
Deleted file (36 lines):

@@ -1,36 +0,0 @@
name: Bug Report (Edit Predictions)
description: Zed Edit Predictions bugs
type: "Bug"
labels: ["ai", "inline completion", "zeta"]
title: "Edit Predictions: <a short description of the Edit Prediction bug>"
body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one line summary, and provide detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->
SUMMARY_SENTENCE_HERE

### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
<!-- Please include the LLM provider and model name you are using -->
Steps to trigger the problem:
1.
2.
3.

Actual Behavior:
Expected Behavior:
validations:
required: true

- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
validations:
required: true
.github/ISSUE_TEMPLATE/03_bug_git.yml (vendored, 35 lines deleted)

@@ -1,35 +0,0 @@
name: Bug Report (Git)
description: Zed Git-Related Bugs
type: "Bug"
labels: ["git"]
title: "Git: <a short description of the Git bug>"
body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one line summary, and provide detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->
SUMMARY_SENTENCE_HERE

### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
Steps to trigger the problem:
1.
2.
3.

Actual Behavior:
Expected Behavior:

validations:
required: true
- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
validations:
required: true
.github/ISSUE_TEMPLATE/04_bug_debugger.yml (vendored, 4 changed lines)

@@ -19,8 +19,8 @@ body:
2.
3.

Actual Behavior:
Expected Behavior:
**Expected Behavior**:
**Actual Behavior**:

validations:
required: true
.github/ISSUE_TEMPLATE/10_bug_report.yml (vendored, 6 changed lines)

@@ -18,14 +18,16 @@ body:
- Issues with insufficient detail may be summarily closed.
-->

DESCRIPTION_HERE

Steps to reproduce:
1.
2.
3.
4.

Expected Behavior:
Actual Behavior:
**Expected Behavior**:
**Actual Behavior**:

<!-- Before Submitting, did you:
1. Include settings.json, keymap.json, .editorconfig if relevant?
.github/actions/build_docs/action.yml (vendored, new file, 32 lines)

@@ -0,0 +1,32 @@
name: "Build docs"
description: "Build the docs"

runs:
using: "composite"
steps:
- name: Setup mdBook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2
with:
mdbook-version: "0.4.37"

- name: Cache dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "buildjet"

- name: Install Linux dependencies
shell: bash -euxo pipefail {0}
run: ./script/linux

- name: Check for broken links
uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 # v2.4.1
with:
args: --no-progress './docs/src/**/*'
fail: true

- name: Build book
shell: bash -euxo pipefail {0}
run: |
mkdir -p target/deploy
mdbook build ./docs --dest-dir=../target/deploy/docs/
.github/actions/run_tests/action.yml (vendored, 13 changed lines)

@@ -1,6 +1,12 @@
name: "Run tests"
description: "Runs the tests"

inputs:
use-xvfb:
description: "Whether to run tests with xvfb"
required: false
default: "false"

runs:
using: "composite"
steps:

@@ -20,4 +26,9 @@ runs:

- name: Run tests
shell: bash -euxo pipefail {0}
run: cargo nextest run --workspace --no-fail-fast
run: |
if [ "${{ inputs.use-xvfb }}" == "true" ]; then
xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24 -nolisten tcp" cargo nextest run --workspace --no-fail-fast
else
cargo nextest run --workspace --no-fail-fast
fi
.github/workflows/ci.yml (vendored, 96 changed lines)

@@ -73,7 +73,7 @@ jobs:
timeout-minutes: 60
runs-on:
- self-hosted
- test
- macOS
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

@@ -183,6 +183,9 @@ jobs:
- name: Check for todo! and FIXME comments
run: script/check-todos

- name: Check modifier use in keymaps
run: script/check-keymaps

- name: Run style checks
uses: ./.github/actions/check_style

@@ -191,6 +194,27 @@ jobs:
with:
config: ./typos.toml

check_docs:
timeout-minutes: 60
name: Check docs
needs: [job_spec]
if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-8vcpu-ubuntu-2204
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false

- name: Configure CI
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml

- name: Build docs
uses: ./.github/actions/build_docs

macos_tests:
timeout-minutes: 60
name: (macOS) Run Clippy and tests

@@ -200,7 +224,7 @@ jobs:
needs.job_spec.outputs.run_tests == 'true'
runs-on:
- self-hosted
- test
- macOS
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

@@ -295,6 +319,8 @@ jobs:

- name: Run tests
uses: ./.github/actions/run_tests
with:
use-xvfb: true

- name: Build other binaries and features
run: |

@@ -482,7 +508,9 @@ jobs:
- macos_tests
- windows_clippy
- windows_tests
if: always()
if: |
github.repository_owner == 'zed-industries' &&
always()
steps:
- name: Check all tests passed
run: |

@@ -713,6 +741,64 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

freebsd:
timeout-minutes: 60
runs-on: github-8vcpu-ubuntu-2404
if: |
startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
needs: [linux_tests]
name: Build Zed on FreeBSD
# env:
# MYTOKEN : ${{ secrets.MYTOKEN }}
# MYTOKEN2: "value2"
steps:
- uses: actions/checkout@v4
- name: Build FreeBSD remote-server
id: freebsd-build
uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
with:
# envs: "MYTOKEN MYTOKEN2"
usesh: true
release: 13.5
copyback: true
prepare: |
pkg install -y \
bash curl jq git \
rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli
run: |
freebsd-version
sysctl hw.model
sysctl hw.ncpu
sysctl hw.physmem
sysctl hw.usermem
git config --global --add safe.directory /home/runner/work/zed/zed
rustup-init --profile minimal --default-toolchain none -y
. "$HOME/.cargo/env"
./script/bundle-freebsd
mkdir -p out/
mv "target/zed-remote-server-freebsd-x86_64.gz" out/
rm -rf target/
cargo clean

- name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-freebsd.gz
path: out/zed-remote-server-freebsd-x86_64.gz

- name: Upload Artifacts to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: |
out/zed-remote-server-freebsd-x86_64.gz
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

nix-build:
name: Build with Nix
uses: ./.github/workflows/nix.yml

@@ -727,12 +813,12 @@ jobs:
if: |
startsWith(github.ref, 'refs/tags/v')
&& endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64]
needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, freebsd]
runs-on:
- self-hosted
- bundle
steps:
- name: gh release
run: gh release edit $GITHUB_REF_NAME --draft=true
run: gh release edit $GITHUB_REF_NAME --draft=false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/deploy_cloudflare.yml (vendored, 19 changed lines)

@@ -9,7 +9,7 @@ jobs:
deploy-docs:
name: Deploy Docs
if: github.repository_owner == 'zed-industries'
runs-on: ubuntu-latest
runs-on: buildjet-16vcpu-ubuntu-2204

steps:
- name: Checkout repo

@@ -17,24 +17,11 @@ jobs:
with:
clean: false

- name: Setup mdBook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2
with:
mdbook-version: "0.4.37"

- name: Set up default .cargo/config.toml
run: cp ./.cargo/collab-config.toml ./.cargo/config.toml

- name: Install system dependencies
run: |
sudo apt-get update
sudo apt-get install libxkbcommon-dev libxkbcommon-x11-dev

- name: Build book
run: |
set -euo pipefail
mkdir -p target/deploy
mdbook build ./docs --dest-dir=../target/deploy/docs/
- name: Build docs
uses: ./.github/actions/build_docs

- name: Deploy Docs
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
.github/workflows/deploy_collab.yml (vendored, 4 changed lines)

@@ -15,7 +15,7 @@ jobs:
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- test
- macOS
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

@@ -33,7 +33,7 @@ jobs:
name: Run tests
runs-on:
- self-hosted
- test
- macOS
needs: style
steps:
- name: Checkout repo
.github/workflows/release_nightly.yml (vendored, 48 changed lines)

@@ -20,7 +20,7 @@ jobs:
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- test
- macOS
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

@@ -40,7 +40,7 @@ jobs:
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- test
- macOS
needs: style
steps:
- name: Checkout repo

@@ -167,6 +167,50 @@ jobs:
- name: Upload Zed Nightly
run: script/upload-nightly linux-targz

freebsd:
timeout-minutes: 60
if: github.repository_owner == 'zed-industries'
runs-on: github-8vcpu-ubuntu-2404
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
name: Build Zed on FreeBSD
# env:
# MYTOKEN : ${{ secrets.MYTOKEN }}
# MYTOKEN2: "value2"
steps:
- uses: actions/checkout@v4
- name: Build FreeBSD remote-server
id: freebsd-build
uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
with:
# envs: "MYTOKEN MYTOKEN2"
usesh: true
release: 13.5
copyback: true
prepare: |
pkg install -y \
bash curl jq git \
rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli
run: |
freebsd-version
sysctl hw.model
sysctl hw.ncpu
sysctl hw.physmem
sysctl hw.usermem
git config --global --add safe.directory /home/runner/work/zed/zed
rustup-init --profile minimal --default-toolchain none -y
. "$HOME/.cargo/env"
./script/bundle-freebsd
mkdir -p out/
mv "target/zed-remote-server-freebsd-x86_64.gz" out/
rm -rf target/
cargo clean

- name: Upload Zed Nightly
run: script/upload-nightly freebsd

bundle-nix:
name: Build and cache Nix package
needs: tests
.github/workflows/unit_evals.yml (vendored, new file, 85 lines)

@@ -0,0 +1,85 @@
name: Run Unit Evals

on:
schedule:
# GitHub might drop jobs at busy times, so we choose a random time in the middle of the night.
- cron: "47 1 * * *"
workflow_dispatch:

concurrency:
# Allow only one workflow per any non-`main` branch.
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: 0
RUST_BACKTRACE: 1
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}

jobs:
unit_evals:
timeout-minutes: 60
name: Run unit evals
runs-on:
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Add Rust to the PATH
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH

- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false

- name: Cache dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "buildjet"

- name: Install Linux dependencies
run: ./script/linux

- name: Configure CI
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml

- name: Install Rust
shell: bash -euxo pipefail {0}
run: |
cargo install cargo-nextest --locked

- name: Install Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
with:
node-version: "18"

- name: Limit target directory size
shell: bash -euxo pipefail {0}
run: script/clear-target-dir-if-larger-than 100

- name: Run unit evals
shell: bash -euxo pipefail {0}
run: cargo nextest run --workspace --no-fail-fast --features eval --no-capture -E 'test(::eval_)'
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}

- name: Send failure message to Slack channel if needed
if: ${{ failure() }}
uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
with:
method: chat.postMessage
token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
payload: |
channel: C04UDRNNJFQ
text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"

# Even the Linux runner is not stateful, in theory there is no need to do this cleanup.
# But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
# to clean up the config file, I’ve included the cleanup code here as a precaution.
# While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution.
- name: Clean CI config file
if: always()
run: rm -rf ./../.cargo
.rules (6 changed lines)

@@ -5,6 +5,12 @@
* Prefer implementing functionality in existing files unless it is a new logical component. Avoid creating many small files.
* Avoid using functions that panic like `unwrap()`, instead use mechanisms like `?` to propagate errors.
* Be careful with operations like indexing which may panic if the indexes are out of bounds.
* Never silently discard errors with `let _ =` on fallible operations. Always handle errors appropriately:
- Propagate errors with `?` when the calling function should handle them
- Use `.log_err()` or similar when you need to ignore errors but want visibility
- Use explicit error handling with `match` or `if let Err(...)` when you need custom logic
- Example: avoid `let _ = client.request(...).await?;` - use `client.request(...).await?;` instead
* When implementing async operations that may fail, ensure errors propagate to the UI layer so users get meaningful feedback.
* Never create files with `mod.rs` paths - prefer `src/some_module.rs` instead of `src/some_module/mod.rs`.

# GPUI
@@ -47,6 +47,7 @@
"remove_trailing_whitespace_on_save": true,
"ensure_final_newline_on_save": true,
"file_scan_exclusions": [
"crates/assistant_tools/src/evals/fixtures",
"crates/eval/worktrees/",
"crates/eval/repos/",
"**/.git",
102
Cargo.lock
generated
@@ -59,7 +59,7 @@ dependencies = [
|
||||
"assistant_slash_command",
|
||||
"assistant_slash_commands",
|
||||
"assistant_tool",
|
||||
"async-watch",
|
||||
"assistant_tools",
|
||||
"audio",
|
||||
"buffer_diff",
|
||||
"chrono",
|
||||
@@ -99,6 +99,7 @@ dependencies = [
|
||||
"paths",
|
||||
"picker",
|
||||
"postage",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"prompt_store",
|
||||
"proto",
|
||||
@@ -114,6 +115,7 @@ dependencies = [
|
||||
"serde_json_lenient",
|
||||
"settings",
|
||||
"smol",
|
||||
"sqlez",
|
||||
"streaming_diff",
|
||||
"telemetry",
|
||||
"telemetry_events",
|
||||
@@ -129,10 +131,12 @@ dependencies = [
|
||||
"urlencoding",
|
||||
"util",
|
||||
"uuid",
|
||||
"watch",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
"zed_actions",
|
||||
"zed_llm_client",
|
||||
"zstd",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -145,7 +149,6 @@ dependencies = [
|
||||
"deepseek",
|
||||
"fs",
|
||||
"gpui",
|
||||
"indexmap",
|
||||
"language_model",
|
||||
"lmstudio",
|
||||
"log",
|
||||
@@ -525,6 +528,7 @@ dependencies = [
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
"indexed_docs",
|
||||
"indoc",
|
||||
"language",
|
||||
"language_model",
|
||||
"languages",
|
||||
@@ -649,6 +653,7 @@ dependencies = [
|
||||
"settings",
|
||||
"text",
|
||||
"util",
|
||||
"watch",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
"zlog",
|
||||
@@ -661,7 +666,6 @@ dependencies = [
|
||||
"agent_settings",
|
||||
"anyhow",
|
||||
"assistant_tool",
|
||||
"async-watch",
|
||||
"buffer_diff",
|
||||
"chrono",
|
||||
"client",
|
||||
@@ -701,6 +705,7 @@ dependencies = [
|
||||
"serde_json",
|
||||
"settings",
|
||||
"smallvec",
|
||||
"smol",
|
||||
"streaming_diff",
|
||||
"strsim",
|
||||
"task",
|
||||
@@ -712,6 +717,7 @@ dependencies = [
|
||||
"ui",
|
||||
"unindent",
|
||||
"util",
|
||||
"watch",
|
||||
"web_search",
|
||||
"which 6.0.3",
|
||||
"workspace",
|
||||
@@ -1070,15 +1076,6 @@ dependencies = [
|
||||
"tungstenite 0.26.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-watch"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a078faf4e27c0c6cc0efb20e5da59dcccc04968ebf2801d8e0b2195124cdcdb2"
|
||||
dependencies = [
|
||||
"event-listener 2.5.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async_zip"
|
||||
version = "0.0.17"
|
||||
@@ -2983,7 +2980,6 @@ dependencies = [
|
||||
"anyhow",
|
||||
"assistant_context_editor",
|
||||
"assistant_slash_command",
|
||||
"assistant_tool",
|
||||
"async-stripe",
|
||||
"async-trait",
|
||||
"async-tungstenite",
|
||||
@@ -3165,6 +3161,16 @@ dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "command-fds"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2ec1052629a80c28594777d1252efc8a6b005d13f9edfd8c3fc0f44d5b32489a"
|
||||
dependencies = [
|
||||
"nix 0.30.1",
|
||||
"thiserror 2.0.12",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "command_palette"
|
||||
version = "0.1.0"
|
||||
@@ -4057,6 +4063,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"collections",
|
||||
"dap",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
@@ -4230,6 +4237,7 @@ dependencies = [
|
||||
"futures 0.3.31",
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
"itertools 0.14.0",
|
||||
"language",
|
||||
"log",
|
||||
"menu",
|
||||
@@ -4539,6 +4547,8 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"command_palette",
|
||||
"gpui",
|
||||
"mdbook",
|
||||
"regex",
|
||||
"serde",
|
||||
@@ -4546,6 +4556,7 @@ dependencies = [
|
||||
"settings",
|
||||
"util",
|
||||
"workspace-hack",
|
||||
"zed",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5006,7 +5017,6 @@ dependencies = [
|
||||
"assistant_tool",
|
||||
"assistant_tools",
|
||||
"async-trait",
|
||||
"async-watch",
|
||||
"buffer_diff",
|
||||
"chrono",
|
||||
"clap",
|
||||
@@ -5048,6 +5058,7 @@ dependencies = [
|
||||
"unindent",
|
||||
"util",
|
||||
"uuid",
|
||||
"watch",
|
||||
"workspace-hack",
|
||||
"zed_llm_client",
|
||||
]
|
||||
@@ -7070,6 +7081,7 @@ dependencies = [
|
||||
"image",
|
||||
"inventory",
|
||||
"itertools 0.14.0",
|
||||
"libc",
|
||||
"log",
|
||||
"lyon",
|
||||
"media",
|
||||
@@ -8731,7 +8743,6 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"async-watch",
|
||||
"clock",
|
||||
"collections",
|
||||
"ctor",
|
||||
@@ -8758,6 +8769,7 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"shellexpand 2.1.2",
|
||||
"smallvec",
|
||||
"smol",
|
||||
"streaming-iterator",
|
||||
@@ -8780,6 +8792,7 @@ dependencies = [
|
||||
"unicase",
|
||||
"unindent",
|
||||
"util",
|
||||
"watch",
|
||||
"workspace-hack",
|
||||
"zlog",
|
||||
]
|
||||
@@ -8859,6 +8872,7 @@ dependencies = [
|
||||
"mistral",
|
||||
"ollama",
|
||||
"open_ai",
|
||||
"open_router",
|
||||
"partial-json-fixer",
|
||||
"project",
|
||||
"proto",
|
||||
@@ -10128,6 +10142,18 @@ dependencies = [
|
||||
"memoffset",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nix"
|
||||
version = "0.30.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
|
||||
dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"cfg-if",
|
||||
"cfg_aliases 0.2.1",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "node_runtime"
|
||||
version = "0.1.0"
|
||||
@@ -10137,7 +10163,6 @@ dependencies = [
|
||||
"async-std",
|
||||
"async-tar",
|
||||
"async-trait",
|
||||
"async-watch",
|
||||
"futures 0.3.31",
|
||||
"http_client",
|
||||
"log",
|
||||
@@ -10147,6 +10172,7 @@ dependencies = [
|
||||
"serde_json",
|
||||
"smol",
|
||||
"util",
|
||||
"watch",
|
||||
"which 6.0.3",
|
||||
"workspace-hack",
|
||||
]
|
||||
@@ -10195,6 +10221,7 @@ dependencies = [
|
||||
"util",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
"zed_actions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -10703,6 +10730,19 @@ dependencies = [
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "open_router"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"futures 0.3.31",
|
||||
"http_client",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "opener"
|
||||
version = "0.7.2"
|
||||
@@ -12982,7 +13022,6 @@ dependencies = [
|
||||
"askpass",
|
||||
"assistant_tool",
|
||||
"assistant_tools",
|
||||
"async-watch",
|
||||
"backtrace",
|
||||
"cargo_toml",
|
||||
"chrono",
|
||||
@@ -13029,6 +13068,7 @@ dependencies = [
|
||||
"toml 0.8.20",
|
||||
"unindent",
|
||||
"util",
|
||||
"watch",
|
||||
"worktree",
|
||||
"zlog",
|
||||
]
|
||||
@@ -15708,6 +15748,7 @@ dependencies = [
|
||||
"task",
|
||||
"theme",
|
||||
"thiserror 2.0.12",
|
||||
"url",
|
||||
"util",
|
||||
"windows 0.61.1",
|
||||
"workspace-hack",
|
||||
@@ -16489,9 +16530,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter"
|
||||
version = "0.25.5"
|
||||
version = "0.25.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac5fff5c47490dfdf473b5228039bfacad9d765d9b6939d26bf7cc064c1c7822"
|
||||
checksum = "a7cf18d43cbf0bfca51f657132cc616a5097edc4424d538bae6fa60142eaf9f0"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"regex",
|
||||
@@ -16504,9 +16545,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-bash"
|
||||
version = "0.23.3"
|
||||
version = "0.25.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "329a4d48623ac337d42b1df84e81a1c9dbb2946907c102ca72db158c1964a52e"
|
||||
checksum = "871b0606e667e98a1237ebdc1b0d7056e0aebfdc3141d12b399865d4cb6ed8a6"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter-language",
|
||||
@@ -17104,12 +17145,14 @@ dependencies = [
|
||||
"async-fs",
|
||||
"async_zip",
|
||||
"collections",
|
||||
"command-fds",
|
||||
"dirs 4.0.0",
|
||||
"dunce",
|
||||
"futures 0.3.31",
|
||||
"futures-lite 1.13.0",
|
||||
"git2",
|
||||
"globset",
|
||||
"indoc",
|
||||
"itertools 0.14.0",
|
||||
"libc",
|
||||
"log",
|
||||
@@ -17888,6 +17931,19 @@ dependencies = [
|
||||
"leb128",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "watch"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"ctor",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"parking_lot",
|
||||
"rand 0.8.5",
|
||||
"workspace-hack",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-backend"
|
||||
version = "0.3.8"
|
||||
@@ -19687,7 +19743,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.190.0"
|
||||
version = "0.191.0"
|
||||
dependencies = [
|
||||
"activity_indicator",
|
||||
"agent",
|
||||
@@ -19699,7 +19755,6 @@ dependencies = [
|
||||
"assistant_context_editor",
|
||||
"assistant_tool",
|
||||
"assistant_tools",
|
||||
"async-watch",
|
||||
"audio",
|
||||
"auto_update",
|
||||
"auto_update_ui",
|
||||
@@ -19816,6 +19871,7 @@ dependencies = [
|
||||
"uuid",
|
||||
"vim",
|
||||
"vim_mode_setting",
|
||||
"watch",
|
||||
"web_search",
|
||||
"web_search_providers",
|
||||
"welcome",
|
||||
|
||||
@@ -100,6 +100,7 @@ members = [
"crates/notifications",
"crates/ollama",
"crates/open_ai",
"crates/open_router",
"crates/outline",
"crates/outline_panel",
"crates/panel",

@@ -164,6 +165,7 @@ members = [
"crates/util_macros",
"crates/vim",
"crates/vim_mode_setting",
"crates/watch",
"crates/web_search",
"crates/web_search_providers",
"crates/welcome",

@@ -307,6 +309,7 @@ node_runtime = { path = "crates/node_runtime" }
notifications = { path = "crates/notifications" }
ollama = { path = "crates/ollama" }
open_ai = { path = "crates/open_ai" }
open_router = { path = "crates/open_router", features = ["schemars"] }
outline = { path = "crates/outline" }
outline_panel = { path = "crates/outline_panel" }
panel = { path = "crates/panel" }

@@ -371,6 +374,7 @@ util = { path = "crates/util" }
util_macros = { path = "crates/util_macros" }
vim = { path = "crates/vim" }
vim_mode_setting = { path = "crates/vim_mode_setting" }
watch = { path = "crates/watch" }
web_search = { path = "crates/web_search" }
web_search_providers = { path = "crates/web_search_providers" }
welcome = { path = "crates/welcome" }

@@ -401,7 +405,6 @@ async-recursion = "1.0.0"
async-tar = "0.5.0"
async-trait = "0.1"
async-tungstenite = "0.29.1"
async-watch = "0.3.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
aws-config = { version = "1.6.1", features = ["behavior-version-latest"] }
aws-credential-types = { version = "1.2.2", features = [

@@ -572,8 +575,8 @@ tokio = { version = "1" }
tokio-tungstenite = { version = "0.26", features = ["__rustls-tls"] }
toml = "0.8"
tower-http = "0.4.4"
tree-sitter = { version = "0.25.5", features = ["wasm"] }
tree-sitter-bash = "0.23"
tree-sitter = { version = "0.25.6", features = ["wasm"] }
tree-sitter-bash = "0.25.0"
tree-sitter-c = "0.23"
tree-sitter-cpp = "0.23"
tree-sitter-css = "0.23"
8
assets/icons/ai_open_router.svg
Normal file
@@ -0,0 +1,8 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg" fill="currentColor" stroke="currentColor">
|
||||
<g clip-path="url(#clip0_205_3)">
|
||||
<path d="M0.094 7.78c0.469 0 2.281 -0.405 3.219 -0.936s0.938 -0.531 2.875 -1.906c2.453 -1.741 4.188 -1.158 7.031 -1.158" stroke-width="2.8125" />
|
||||
<path d="m15.969 3.797 -4.805 2.774V1.023z" />
|
||||
<path d="M0 7.781c0.469 0 2.281 0.405 3.219 0.936s0.938 0.531 2.875 1.906C8.547 12.364 10.281 11.781 13.125 11.781" stroke-width="2.8125" />
|
||||
<path d="m15.875 11.764 -4.805 -2.774v5.548z" />
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 575 B |
@@ -1,5 +1,4 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M17 20H16C14.9391 20 13.9217 19.6629 13.1716 19.0627C12.4214 18.4626 12 17.6487 12 16.8V7.2C12 6.35131 12.4214 5.53737 13.1716 4.93726C13.9217 4.33714 14.9391 4 16 4H17" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M7 20H8C9.06087 20 10.0783 19.5786 10.8284 18.8284C11.5786 18.0783 12 17.0609 12 16V15" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M7 4H8C9.06087 4 10.0783 4.42143 10.8284 5.17157C11.5786 5.92172 12 6.93913 12 8V9" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M11 13H10.4C9.76346 13 9.15302 12.7893 8.70296 12.4142C8.25284 12.0391 8 11.5304 8 11V5C8 4.46957 8.25284 3.96086 8.70296 3.58579C9.15302 3.21071 9.76346 3 10.4 3H11" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M5 13H5.6C6.23654 13 6.84698 12.7893 7.29704 12.4142C7.74716 12.0391 8 11.5304 8 11V5C8 4.46957 7.74716 3.96086 7.29704 3.58579C6.84698 3.21071 6.23654 3 5.6 3H5" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 715 B After Width: | Height: | Size: 617 B |
1
assets/icons/list_todo.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-list-todo-icon lucide-list-todo"><rect x="3" y="5" width="6" height="6" rx="1"/><path d="m3 17 2 2 4-4"/><path d="M13 6h8"/><path d="M13 12h8"/><path d="M13 18h8"/></svg>
|
||||
|
After Width: | Height: | Size: 373 B |
3
assets/icons/play_alt.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M4 3L13 8L4 13V3Z" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 214 B |
8
assets/icons/play_bug.svg
Normal file
@@ -0,0 +1,8 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M4 12C2.35977 11.85 1 10.575 1 9" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M1.00875 15.2C1.00875 13.625 0.683456 12.275 4.00001 12.2" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M7 9C7 10.575 5.62857 11.85 4 12" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M4 12.2C6.98117 12.2 7 13.625 7 15.2" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<rect x="2.5" y="9" width="3" height="6" rx="1.5" fill="black"/>
|
||||
<path d="M9 10L13 8L4 3V7.5" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 813 B |
@@ -1,3 +1,8 @@
|
||||
<svg width="17" height="17" viewBox="0 0 17 17" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M6.36667 3.79167C5.53364 3.79167 4.85833 4.46697 4.85833 5.3C4.85833 6.13303 5.53364 6.80833 6.36667 6.80833C7.1997 6.80833 7.875 6.13303 7.875 5.3C7.875 4.46697 7.1997 3.79167 6.36667 3.79167ZM2.1 5.925H3.67944C3.9626 7.14732 5.05824 8.05833 6.36667 8.05833C7.67509 8.05833 8.77073 7.14732 9.05389 5.925H14.9C15.2452 5.925 15.525 5.64518 15.525 5.3C15.525 4.95482 15.2452 4.675 14.9 4.675H9.05389C8.77073 3.45268 7.67509 2.54167 6.36667 2.54167C5.05824 2.54167 3.9626 3.45268 3.67944 4.675H2.1C1.75482 4.675 1.475 4.95482 1.475 5.3C1.475 5.64518 1.75482 5.925 2.1 5.925ZM13.3206 12.325C13.0374 13.5473 11.9418 14.4583 10.6333 14.4583C9.32491 14.4583 8.22927 13.5473 7.94611 12.325H2.1C1.75482 12.325 1.475 12.0452 1.475 11.7C1.475 11.3548 1.75482 11.075 2.1 11.075H7.94611C8.22927 9.85268 9.32491 8.94167 10.6333 8.94167C11.9418 8.94167 13.0374 9.85268 13.3206 11.075H14.9C15.2452 11.075 15.525 11.3548 15.525 11.7C15.525 12.0452 15.2452 12.325 14.9 12.325H13.3206ZM9.125 11.7C9.125 10.867 9.8003 10.1917 10.6333 10.1917C11.4664 10.1917 12.1417 10.867 12.1417 11.7C12.1417 12.533 11.4664 13.2083 10.6333 13.2083C9.8003 13.2083 9.125 12.533 9.125 11.7Z" fill="black"/>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M2 5H4" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<path d="M8 5L14 5" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<path d="M12 11L14 11" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<path d="M2 11H8" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<circle cx="6" cy="5" r="2" fill="black" fill-opacity="0.1" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<circle cx="10" cy="11" r="2" fill="black" fill-opacity="0.1" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 1.3 KiB After Width: | Height: | Size: 657 B |
@@ -1 +1,3 @@
|
||||
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M6.97942 1.25171L6.9585 1.30199L5.58662 4.60039C5.54342 4.70426 5.44573 4.77523 5.3336 4.78422L1.7727 5.0697L1.71841 5.07405L1.38687 5.10063L1.08608 5.12475C0.820085 5.14607 0.712228 5.47802 0.914889 5.65162L1.14406 5.84793L1.39666 6.06431L1.43802 6.09974L4.15105 8.42374C4.23648 8.49692 4.2738 8.61176 4.24769 8.72118L3.41882 12.196L3.40618 12.249L3.32901 12.5725L3.25899 12.866C3.19708 13.1256 3.47945 13.3308 3.70718 13.1917L3.9647 13.0344L4.24854 12.861L4.29502 12.8326L7.34365 10.9705C7.43965 10.9119 7.5604 10.9119 7.6564 10.9705L10.705 12.8326L10.7515 12.861L11.0354 13.0344L11.2929 13.1917C11.5206 13.3308 11.803 13.1256 11.7411 12.866L11.671 12.5725L11.5939 12.249L11.5812 12.196L10.7524 8.72118C10.7263 8.61176 10.7636 8.49692 10.849 8.42374L13.562 6.09974L13.6034 6.06431L13.856 5.84793L14.0852 5.65162C14.2878 5.47802 14.18 5.14607 13.914 5.12475L13.6132 5.10063L13.2816 5.07405L13.2274 5.0697L9.66645 4.78422C9.55432 4.77523 9.45663 4.70426 9.41343 4.60039L8.04155 1.30199L8.02064 1.25171L7.89291 0.944609L7.77702 0.665992C7.67454 0.419604 7.32551 0.419604 7.22303 0.665992L7.10715 0.944609L6.97942 1.25171ZM7.50003 2.60397L6.50994 4.98442C6.32273 5.43453 5.89944 5.74207 5.41351 5.78103L2.84361 5.98705L4.8016 7.66428C5.17183 7.98142 5.33351 8.47903 5.2204 8.95321L4.62221 11.461L6.8224 10.1171C7.23842 9.86302 7.76164 9.86302 8.17766 10.1171L10.3778 11.461L9.77965 8.95321C9.66654 8.47903 9.82822 7.98142 10.1984 7.66428L12.1564 5.98705L9.58654 5.78103C9.10061 5.74207 8.67732 5.43453 8.49011 4.98442L7.50003 2.60397Z" fill="currentColor" fill-rule="evenodd" clip-rule="evenodd"></path></svg>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M7.68323 1.53C7.71245 1.47097 7.75758 1.42129 7.81353 1.38655C7.86949 1.35181 7.93404 1.3334 7.9999 1.3334C8.06576 1.3334 8.13031 1.35181 8.18626 1.38655C8.24222 1.42129 8.28735 1.47097 8.31656 1.53L9.85656 4.64933C9.95802 4.85465 10.1078 5.03227 10.293 5.16697C10.4782 5.30167 10.6933 5.38941 10.9199 5.42267L14.3639 5.92667C14.4292 5.93612 14.4905 5.96365 14.5409 6.00613C14.5913 6.04862 14.6289 6.10437 14.6492 6.16707C14.6696 6.22978 14.6721 6.29694 14.6563 6.36096C14.6405 6.42498 14.6071 6.4833 14.5599 6.52933L12.0692 8.95467C11.905 9.11473 11.7821 9.31232 11.7111 9.53042C11.6402 9.74852 11.6233 9.98059 11.6619 10.2067L12.2499 13.6333C12.2614 13.6986 12.2544 13.7657 12.2296 13.8271C12.2048 13.8885 12.1632 13.9417 12.1096 13.9807C12.056 14.0196 11.9926 14.0427 11.9265 14.0473C11.8604 14.0519 11.7944 14.0378 11.7359 14.0067L8.65723 12.388C8.45438 12.2815 8.22868 12.2258 7.99956 12.2258C7.77044 12.2258 7.54475 12.2815 7.3419 12.388L4.2639 14.0067C4.20545 14.0376 4.1395 14.0515 4.07353 14.0468C4.00757 14.0421 3.94424 14.019 3.89076 13.9801C3.83728 13.9413 3.79579 13.8881 3.771 13.8268C3.74622 13.7655 3.73914 13.6985 3.75056 13.6333L4.3379 10.2073C4.3767 9.98116 4.35989 9.74893 4.28892 9.5307C4.21796 9.31246 4.09497 9.11477 3.93056 8.95467L1.4399 6.53C1.39229 6.48402 1.35856 6.4256 1.34254 6.36138C1.32652 6.29717 1.32886 6.22975 1.34928 6.16679C1.36971 6.10384 1.40741 6.04789 1.45808 6.00532C1.50876 5.96275 1.57037 5.93527 1.6359 5.926L5.07923 5.42267C5.30607 5.38967 5.52149 5.30204 5.70695 5.16733C5.89242 5.03261 6.04237 4.85485 6.1439 4.64933L7.68323 1.53Z" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 1.7 KiB |
@@ -1 +1,3 @@
|
||||
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M7.22303 0.665992C7.32551 0.419604 7.67454 0.419604 7.77702 0.665992L9.41343 4.60039C9.45663 4.70426 9.55432 4.77523 9.66645 4.78422L13.914 5.12475C14.18 5.14607 14.2878 5.47802 14.0852 5.65162L10.849 8.42374C10.7636 8.49692 10.7263 8.61176 10.7524 8.72118L11.7411 12.866C11.803 13.1256 11.5206 13.3308 11.2929 13.1917L7.6564 10.9705C7.5604 10.9119 7.43965 10.9119 7.34365 10.9705L3.70718 13.1917C3.47945 13.3308 3.19708 13.1256 3.25899 12.866L4.24769 8.72118C4.2738 8.61176 4.23648 8.49692 4.15105 8.42374L0.914889 5.65162C0.712228 5.47802 0.820086 5.14607 1.08608 5.12475L5.3336 4.78422C5.44573 4.77523 5.54342 4.70426 5.58662 4.60039L7.22303 0.665992Z" fill="currentColor"></path></svg>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M7.68323 1.53C7.71245 1.47097 7.75758 1.42129 7.81353 1.38655C7.86949 1.35181 7.93404 1.3334 7.9999 1.3334C8.06576 1.3334 8.13031 1.35181 8.18626 1.38655C8.24222 1.42129 8.28735 1.47097 8.31656 1.53L9.85656 4.64933C9.95802 4.85465 10.1078 5.03227 10.293 5.16697C10.4782 5.30167 10.6933 5.38941 10.9199 5.42267L14.3639 5.92667C14.4292 5.93612 14.4905 5.96365 14.5409 6.00613C14.5913 6.04862 14.6289 6.10437 14.6492 6.16707C14.6696 6.22978 14.6721 6.29694 14.6563 6.36096C14.6405 6.42498 14.6071 6.4833 14.5599 6.52933L12.0692 8.95467C11.905 9.11473 11.7821 9.31232 11.7111 9.53042C11.6402 9.74852 11.6233 9.98059 11.6619 10.2067L12.2499 13.6333C12.2614 13.6986 12.2544 13.7657 12.2296 13.8271C12.2048 13.8885 12.1632 13.9417 12.1096 13.9807C12.056 14.0196 11.9926 14.0427 11.9265 14.0473C11.8604 14.0519 11.7944 14.0378 11.7359 14.0067L8.65723 12.388C8.45438 12.2815 8.22868 12.2258 7.99956 12.2258C7.77044 12.2258 7.54475 12.2815 7.3419 12.388L4.2639 14.0067C4.20545 14.0376 4.1395 14.0515 4.07353 14.0468C4.00757 14.0421 3.94424 14.019 3.89076 13.9801C3.83728 13.9413 3.79579 13.8881 3.771 13.8268C3.74622 13.7655 3.73914 13.6985 3.75056 13.6333L4.3379 10.2073C4.3767 9.98116 4.35989 9.74893 4.28892 9.5307C4.21796 9.31246 4.09497 9.11477 3.93056 8.95467L1.4399 6.53C1.39229 6.48402 1.35856 6.4256 1.34254 6.36138C1.32652 6.29717 1.32886 6.22975 1.34928 6.16679C1.36971 6.10384 1.40741 6.04789 1.45808 6.00532C1.50876 5.96275 1.57037 5.93527 1.6359 5.926L5.07923 5.42267C5.30607 5.38967 5.52149 5.30204 5.70695 5.16733C5.89242 5.03261 6.04237 4.85485 6.1439 4.64933L7.68323 1.53Z" fill="black" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 794 B After Width: | Height: | Size: 1.7 KiB |
@@ -1,5 +1,5 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M7 1.75L5.88467 5.14092C5.82759 5.31446 5.73055 5.47218 5.60136 5.60136C5.47218 5.73055 5.31446 5.82759 5.14092 5.88467L1.75 7L5.14092 8.11533C5.31446 8.17241 5.47218 8.26945 5.60136 8.39864C5.73055 8.52782 5.82759 8.68554 5.88467 8.85908L7 12.25L8.11533 8.85908C8.17241 8.68554 8.26945 8.52782 8.39864 8.39864C8.52782 8.26945 8.68554 8.17241 8.85908 8.11533L12.25 7L8.85908 5.88467C8.68554 5.82759 8.52782 5.73055 8.39864 5.60136C8.26945 5.47218 8.17241 5.31446 8.11533 5.14092L7 1.75Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M2.91667 1.75V4.08333M1.75 2.91667H4.08333" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11.0833 9.91667V12.25M9.91667 11.0833H12.25" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M8 2L6.72534 5.87534C6.6601 6.07367 6.5492 6.25392 6.40155 6.40155C6.25392 6.5492 6.07367 6.6601 5.87534 6.72534L2 8L5.87534 9.27466C6.07367 9.3399 6.25392 9.4508 6.40155 9.59845C6.5492 9.74608 6.6601 9.92633 6.72534 10.1247L8 14L9.27466 10.1247C9.3399 9.92633 9.4508 9.74608 9.59845 9.59845C9.74608 9.4508 9.92633 9.3399 10.1247 9.27466L14 8L10.1247 6.72534C9.92633 6.6601 9.74608 6.5492 9.59845 6.40155C9.4508 6.25392 9.3399 6.07367 9.27466 5.87534L8 2Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M3.33334 2V4.66666M2 3.33334H4.66666" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M12.6665 11.3333V14M11.3333 12.6666H13.9999" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 1.0 KiB After Width: | Height: | Size: 998 B |
@@ -31,13 +31,11 @@
|
||||
"ctrl-,": "zed::OpenSettings",
|
||||
"ctrl-q": "zed::Quit",
|
||||
"f4": "debugger::Start",
|
||||
"alt-f4": "debugger::RerunLastSession",
|
||||
"f5": "debugger::Continue",
|
||||
"shift-f5": "debugger::Stop",
|
||||
"ctrl-shift-f5": "debugger::Restart",
|
||||
"f6": "debugger::Pause",
|
||||
"f7": "debugger::StepOver",
|
||||
"cmd-f11": "debugger::StepInto",
|
||||
"ctrl-f11": "debugger::StepInto",
|
||||
"shift-f11": "debugger::StepOut",
|
||||
"f11": "zed::ToggleFullScreen",
|
||||
"ctrl-alt-z": "edit_prediction::RateCompletions",
|
||||
@@ -61,7 +59,6 @@
|
||||
"tab": "editor::Tab",
|
||||
"shift-tab": "editor::Backtab",
|
||||
"ctrl-k": "editor::CutToEndOfLine",
|
||||
// "ctrl-t": "editor::Transpose",
|
||||
"ctrl-k ctrl-q": "editor::Rewrap",
|
||||
"ctrl-k q": "editor::Rewrap",
|
||||
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
|
||||
@@ -102,27 +99,22 @@
|
||||
"shift-down": "editor::SelectDown",
|
||||
"shift-left": "editor::SelectLeft",
|
||||
"shift-right": "editor::SelectRight",
|
||||
"ctrl-shift-left": "editor::SelectToPreviousWordStart", // cursorWordLeftSelect
|
||||
"ctrl-shift-right": "editor::SelectToNextWordEnd", // cursorWordRightSelect
|
||||
"ctrl-shift-left": "editor::SelectToPreviousWordStart",
|
||||
"ctrl-shift-right": "editor::SelectToNextWordEnd",
|
||||
"ctrl-shift-home": "editor::SelectToBeginning",
|
||||
"ctrl-shift-end": "editor::SelectToEnd",
|
||||
"ctrl-a": "editor::SelectAll",
|
||||
"ctrl-l": "editor::SelectLine",
|
||||
"ctrl-shift-i": "editor::Format",
|
||||
"alt-shift-o": "editor::OrganizeImports",
|
||||
// "cmd-shift-left": ["editor::SelectToBeginningOfLine", {"stop_at_soft_wraps": true, "stop_at_indent": true }],
|
||||
// "ctrl-shift-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
|
||||
"shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
|
||||
// "cmd-shift-right": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": true }],
|
||||
// "ctrl-shift-e": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": true }],
|
||||
"shift-end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": true }],
|
||||
// "alt-v": ["editor::MovePageUp", { "center_cursor": true }],
|
||||
"ctrl-alt-space": "editor::ShowCharacterPalette",
|
||||
"ctrl-;": "editor::ToggleLineNumbers",
|
||||
"ctrl-'": "editor::ToggleSelectedDiffHunks",
|
||||
"ctrl-\"": "editor::ExpandAllDiffHunks",
|
||||
"ctrl-i": "editor::ShowSignatureHelp",
|
||||
"alt-g b": "editor::ToggleGitBlame",
|
||||
"alt-g b": "git::Blame",
|
||||
"menu": "editor::OpenContextMenu",
|
||||
"shift-f10": "editor::OpenContextMenu",
|
||||
"ctrl-shift-e": "editor::ToggleEditPrediction",
|
||||
@@ -142,7 +134,6 @@
|
||||
"find": "buffer_search::Deploy",
|
||||
"ctrl-f": "buffer_search::Deploy",
|
||||
"ctrl-h": "buffer_search::DeployReplace",
|
||||
// "cmd-e": ["buffer_search::Deploy", { "focus": false }],
|
||||
"ctrl->": "assistant::QuoteSelection",
|
||||
"ctrl-<": "assistant::InsertIntoEditor",
|
||||
"ctrl-alt-e": "editor::SelectEnclosingSymbol",
|
||||
@@ -155,8 +146,7 @@
|
||||
"context": "Editor && mode == full && edit_prediction",
|
||||
"bindings": {
|
||||
"alt-]": "editor::NextEditPrediction",
|
||||
"alt-[": "editor::PreviousEditPrediction",
|
||||
"alt-right": "editor::AcceptPartialEditPrediction"
|
||||
"alt-[": "editor::PreviousEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -221,7 +211,6 @@
|
||||
"ctrl-enter": "assistant::Assist",
|
||||
"ctrl-s": "workspace::Save",
|
||||
"save": "workspace::Save",
|
||||
"ctrl->": "assistant::QuoteSelection",
|
||||
"ctrl-<": "assistant::InsertIntoEditor",
|
||||
"shift-enter": "assistant::Split",
|
||||
"ctrl-r": "assistant::CycleMessageRole",
|
||||
@@ -247,6 +236,7 @@
|
||||
"ctrl-shift-j": "agent::ToggleNavigationMenu",
|
||||
"ctrl-shift-i": "agent::ToggleOptionsMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"ctrl->": "assistant::QuoteSelection",
|
||||
"ctrl-alt-e": "agent::RemoveAllContext",
|
||||
"ctrl-shift-e": "project_panel::ToggleFocus",
|
||||
"ctrl-shift-enter": "agent::ContinueThread",
|
||||
@@ -270,8 +260,8 @@
|
||||
{
|
||||
"context": "AgentPanel && prompt_editor",
|
||||
"bindings": {
|
||||
"cmd-n": "agent::NewTextThread",
|
||||
"cmd-alt-t": "agent::NewThread"
|
||||
"ctrl-n": "agent::NewTextThread",
|
||||
"ctrl-alt-t": "agent::NewThread"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -280,7 +270,9 @@
|
||||
"enter": "agent::Chat",
|
||||
"ctrl-enter": "agent::ChatWithFollow",
|
||||
"ctrl-i": "agent::ToggleProfileSelector",
|
||||
"shift-ctrl-r": "agent::OpenAgentDiff"
|
||||
"shift-ctrl-r": "agent::OpenAgentDiff",
|
||||
"ctrl-shift-y": "agent::KeepAll",
|
||||
"ctrl-shift-n": "agent::RejectAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -512,14 +504,14 @@
|
||||
{
|
||||
"context": "Workspace",
|
||||
"bindings": {
|
||||
"alt-open": ["projects::OpenRecent", { "create_new_window": false }],
|
||||
// Change the default action on `menu::Confirm` by setting the parameter
|
||||
// "alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": true }],
|
||||
"alt-open": "projects::OpenRecent",
|
||||
"alt-ctrl-o": "projects::OpenRecent",
|
||||
"alt-shift-open": "projects::OpenRemote",
|
||||
"alt-ctrl-shift-o": "projects::OpenRemote",
|
||||
"alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": false }],
|
||||
"alt-shift-open": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
|
||||
// Change to open the path modal for an existing remote connection by setting the parameter
|
||||
// "alt-ctrl-shift-o": "["projects::OpenRemote", { "from_existing_connection": true }]",
|
||||
"alt-ctrl-shift-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
|
||||
"alt-ctrl-shift-b": "branches::OpenRecent",
|
||||
"alt-shift-enter": "toast::RunAction",
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
@@ -583,11 +575,24 @@
|
||||
"ctrl-alt-r": "task::Rerun",
|
||||
"alt-t": "task::Rerun",
|
||||
"alt-shift-t": "task::Spawn",
|
||||
"alt-shift-r": ["task::Spawn", { "reveal_target": "center" }]
|
||||
"alt-shift-r": ["task::Spawn", { "reveal_target": "center" }],
|
||||
// also possible to spawn tasks by name:
|
||||
// "foo-bar": ["task::Spawn", { "task_name": "MyTask", "reveal_target": "dock" }]
|
||||
// or by tag:
|
||||
// "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }],
|
||||
"f5": "debugger::RerunLastSession"
|
||||
}
|
||||
},
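Following the commented placeholders above, which note that tasks can be spawned by name or by tag, a concrete-looking user binding would be shaped as below; the key "foo-bar" and the task name "MyTask" are the same placeholders, not real values:

    {
      "context": "Workspace",
      "bindings": {
        // Placeholder key and task name; reveal_target matches the binding above.
        "foo-bar": ["task::Spawn", { "task_name": "MyTask", "reveal_target": "center" }]
      }
    }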
|
||||
{
|
||||
"context": "Workspace && debugger_running",
|
||||
"bindings": {
|
||||
"f5": "zed::NoAction"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Workspace && debugger_stopped",
|
||||
"bindings": {
|
||||
"f5": "debugger::Continue"
|
||||
}
|
||||
},
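The two context blocks above show the pattern: the same key resolves to different actions depending on debugger state, with zed::NoAction serving as an explicit no-op. A user keymap can use the same no-op to opt out of the new f5 default entirely (a sketch, not taken from this diff):

    {
      "context": "Workspace",
      "bindings": {
        // Disable the default f5 rerun/continue behavior in the workspace.
        "f5": "zed::NoAction"
      }
    }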
|
||||
{
|
||||
@@ -649,14 +654,16 @@
|
||||
"bindings": {
|
||||
"alt-tab": "editor::AcceptEditPrediction",
|
||||
"alt-l": "editor::AcceptEditPrediction",
|
||||
"tab": "editor::AcceptEditPrediction"
|
||||
"tab": "editor::AcceptEditPrediction",
|
||||
"alt-right": "editor::AcceptPartialEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && edit_prediction_conflict",
|
||||
"bindings": {
|
||||
"alt-tab": "editor::AcceptEditPrediction",
|
||||
"alt-l": "editor::AcceptEditPrediction"
|
||||
"alt-l": "editor::AcceptEditPrediction",
|
||||
"alt-right": "editor::AcceptPartialEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -873,7 +880,8 @@
|
||||
"context": "DebugPanel",
|
||||
"bindings": {
|
||||
"ctrl-t": "debugger::ToggleThreadPicker",
|
||||
"ctrl-i": "debugger::ToggleSessionPicker"
|
||||
"ctrl-i": "debugger::ToggleSessionPicker",
|
||||
"shift-alt-escape": "debugger::ToggleExpandItem"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -897,7 +905,9 @@
|
||||
"context": "CollabPanel && not_editing",
|
||||
"bindings": {
|
||||
"ctrl-backspace": "collab_panel::Remove",
|
||||
"space": "menu::Confirm"
|
||||
"space": "menu::Confirm",
|
||||
"ctrl-up": "collab_panel::MoveChannelUp",
|
||||
"ctrl-down": "collab_panel::MoveChannelDown"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -928,6 +938,13 @@
|
||||
"tab": "channel_modal::ToggleMode"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "FileFinder",
|
||||
"bindings": {
|
||||
"ctrl-shift-a": "file_finder::ToggleSplitMenu",
|
||||
"ctrl-shift-i": "file_finder::ToggleFilterMenu"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "FileFinder || (FileFinder > Picker > Editor) || (FileFinder > Picker > menu)",
|
||||
"bindings": {
|
||||
|
||||
@@ -4,8 +4,6 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"f4": "debugger::Start",
|
||||
"alt-f4": "debugger::RerunLastSession",
|
||||
"f5": "debugger::Continue",
|
||||
"shift-f5": "debugger::Stop",
|
||||
"shift-cmd-f5": "debugger::Restart",
|
||||
"f6": "debugger::Pause",
|
||||
@@ -140,7 +138,7 @@
|
||||
"cmd-;": "editor::ToggleLineNumbers",
|
||||
"cmd-'": "editor::ToggleSelectedDiffHunks",
|
||||
"cmd-\"": "editor::ExpandAllDiffHunks",
|
||||
"cmd-alt-g b": "editor::ToggleGitBlame",
|
||||
"cmd-alt-g b": "git::Blame",
|
||||
"cmd-i": "editor::ShowSignatureHelp",
|
||||
"f9": "editor::ToggleBreakpoint",
|
||||
"shift-f9": "editor::EditLogBreakpoint",
|
||||
@@ -183,8 +181,7 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"alt-tab": "editor::NextEditPrediction",
|
||||
"alt-shift-tab": "editor::PreviousEditPrediction",
|
||||
"ctrl-cmd-right": "editor::AcceptPartialEditPrediction"
|
||||
"alt-shift-tab": "editor::PreviousEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -255,7 +252,6 @@
|
||||
"bindings": {
|
||||
"cmd-enter": "assistant::Assist",
|
||||
"cmd-s": "workspace::Save",
|
||||
"cmd->": "assistant::QuoteSelection",
|
||||
"cmd-<": "assistant::InsertIntoEditor",
|
||||
"shift-enter": "assistant::Split",
|
||||
"ctrl-r": "assistant::CycleMessageRole",
|
||||
@@ -282,6 +278,7 @@
|
||||
"cmd-shift-j": "agent::ToggleNavigationMenu",
|
||||
"cmd-shift-i": "agent::ToggleOptionsMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"cmd->": "assistant::QuoteSelection",
|
||||
"cmd-alt-e": "agent::RemoveAllContext",
|
||||
"cmd-shift-e": "project_panel::ToggleFocus",
|
||||
"cmd-shift-enter": "agent::ContinueThread",
|
||||
@@ -317,7 +314,9 @@
|
||||
"enter": "agent::Chat",
|
||||
"cmd-enter": "agent::ChatWithFollow",
|
||||
"cmd-i": "agent::ToggleProfileSelector",
|
||||
"shift-ctrl-r": "agent::OpenAgentDiff"
|
||||
"shift-ctrl-r": "agent::OpenAgentDiff",
|
||||
"cmd-shift-y": "agent::KeepAll",
|
||||
"cmd-shift-n": "agent::RejectAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -584,9 +583,9 @@
|
||||
"bindings": {
|
||||
// Change the default action on `menu::Confirm` by setting the parameter
|
||||
// "alt-cmd-o": ["projects::OpenRecent", {"create_new_window": true }],
|
||||
"alt-cmd-o": "projects::OpenRecent",
|
||||
"ctrl-cmd-o": "projects::OpenRemote",
|
||||
"ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true }],
|
||||
"alt-cmd-o": ["projects::OpenRecent", { "create_new_window": false }],
|
||||
"ctrl-cmd-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
|
||||
"ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true, "create_new_window": false }],
|
||||
"alt-cmd-b": "branches::OpenRecent",
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
"cmd-s": "workspace::Save",
|
||||
@@ -635,7 +634,8 @@
|
||||
"cmd-k shift-right": "workspace::SwapPaneRight",
|
||||
"cmd-k shift-up": "workspace::SwapPaneUp",
|
||||
"cmd-k shift-down": "workspace::SwapPaneDown",
|
||||
"cmd-shift-x": "zed::Extensions"
|
||||
"cmd-shift-x": "zed::Extensions",
|
||||
"f5": "debugger::RerunLastSession"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -652,6 +652,20 @@
|
||||
// "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }],
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Workspace && debugger_running",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"f5": "zed::NoAction"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Workspace && debugger_stopped",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"f5": "debugger::Continue"
|
||||
}
|
||||
},
|
||||
// Bindings from Sublime Text
|
||||
{
|
||||
"context": "Editor",
|
||||
@@ -704,14 +718,16 @@
|
||||
"context": "Editor && edit_prediction",
|
||||
"bindings": {
|
||||
"alt-tab": "editor::AcceptEditPrediction",
|
||||
"tab": "editor::AcceptEditPrediction"
|
||||
"tab": "editor::AcceptEditPrediction",
|
||||
"ctrl-cmd-right": "editor::AcceptPartialEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && edit_prediction_conflict",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"alt-tab": "editor::AcceptEditPrediction"
|
||||
"alt-tab": "editor::AcceptEditPrediction",
|
||||
"ctrl-cmd-right": "editor::AcceptPartialEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -936,7 +952,8 @@
|
||||
"context": "DebugPanel",
|
||||
"bindings": {
|
||||
"cmd-t": "debugger::ToggleThreadPicker",
|
||||
"cmd-i": "debugger::ToggleSessionPicker"
|
||||
"cmd-i": "debugger::ToggleSessionPicker",
|
||||
"shift-alt-escape": "debugger::ToggleExpandItem"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -951,7 +968,9 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-backspace": "collab_panel::Remove",
|
||||
"space": "menu::Confirm"
|
||||
"space": "menu::Confirm",
|
||||
"cmd-up": "collab_panel::MoveChannelUp",
|
||||
"cmd-down": "collab_panel::MoveChannelDown"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -987,6 +1006,14 @@
|
||||
"tab": "channel_modal::ToggleMode"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "FileFinder",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-shift-a": "file_finder::ToggleSplitMenu",
|
||||
"cmd-shift-i": "file_finder::ToggleFilterMenu"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "FileFinder || (FileFinder > Picker > Editor) || (FileFinder > Picker > menu)",
|
||||
"use_key_equivalents": true,
|
||||
|
||||
@@ -13,9 +13,9 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor",
|
||||
"context": "Editor && vim_mode == insert && !menu",
|
||||
"bindings": {
|
||||
// "j k": ["workspace::SendKeystrokes", "escape"]
|
||||
// "j k": "vim::SwitchToNormalMode"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -38,7 +38,7 @@
|
||||
"ctrl-shift-d": "editor::DuplicateSelection",
|
||||
"alt-f3": "editor::SelectAllMatches", // find_all_under
|
||||
// "ctrl-f3": "", // find_under (cancels any selections)
|
||||
// "cmd-alt-shift-g": "" // find_under_prev (cancels any selections)
|
||||
// "ctrl-alt-shift-g": "" // find_under_prev (cancels any selections)
|
||||
"f9": "editor::SortLinesCaseSensitive",
|
||||
"ctrl-f9": "editor::SortLinesCaseInsensitive",
|
||||
"f12": "editor::GoToDefinition",
|
||||
@@ -51,7 +51,11 @@
|
||||
"ctrl-k ctrl-l": "editor::ConvertToLowerCase",
|
||||
"shift-alt-m": "markdown::OpenPreviewToTheSide",
|
||||
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd"
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd",
|
||||
"alt-right": "editor::MoveToNextSubwordEnd",
|
||||
"alt-left": "editor::MoveToPreviousSubwordStart",
|
||||
"alt-shift-right": "editor::SelectToNextSubwordEnd",
|
||||
"alt-shift-left": "editor::SelectToPreviousSubwordStart"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -28,7 +28,8 @@
|
||||
"context": "InlineAssistEditor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-shift-backspace": "editor::Cancel"
|
||||
"cmd-shift-backspace": "editor::Cancel",
|
||||
"cmd-enter": "menu::Confirm"
|
||||
// "alt-enter": // Quick Question
|
||||
// "cmd-shift-enter": // Full File Context
|
||||
// "cmd-shift-k": // Toggle input focus (editor <> inline assist)
|
||||
|
||||
@@ -53,7 +53,11 @@
|
||||
"cmd-shift-j": "editor::JoinLines",
|
||||
"shift-alt-m": "markdown::OpenPreviewToTheSide",
|
||||
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd"
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd",
|
||||
"ctrl-right": "editor::MoveToNextSubwordEnd",
|
||||
"ctrl-left": "editor::MoveToPreviousSubwordStart",
|
||||
"ctrl-shift-right": "editor::SelectToNextSubwordEnd",
|
||||
"ctrl-shift-left": "editor::SelectToPreviousSubwordStart"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -198,6 +198,8 @@
|
||||
"9": ["vim::Number", 9],
|
||||
"ctrl-w d": "editor::GoToDefinitionSplit",
|
||||
"ctrl-w g d": "editor::GoToDefinitionSplit",
|
||||
"ctrl-w ]": "editor::GoToDefinitionSplit",
|
||||
"ctrl-w ctrl-]": "editor::GoToDefinitionSplit",
|
||||
"ctrl-w shift-d": "editor::GoToTypeDefinitionSplit",
|
||||
"ctrl-w g shift-d": "editor::GoToTypeDefinitionSplit",
|
||||
"ctrl-w space": "editor::OpenExcerptsSplit",
|
||||
@@ -709,7 +711,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitPanel || ProjectPanel || CollabPanel || OutlinePanel || ChatPanel || VimControl || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || DebugPanel",
|
||||
"context": "AgentPanel || GitPanel || ProjectPanel || CollabPanel || OutlinePanel || ChatPanel || VimControl || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || DebugPanel",
|
||||
"bindings": {
|
||||
// window related commands (ctrl-w X)
|
||||
"ctrl-w": null,
|
||||
@@ -838,6 +840,19 @@
|
||||
"tab": "editor::AcceptEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "MessageEditor > Editor && VimControl",
|
||||
"bindings": {
|
||||
"enter": "agent::Chat",
|
||||
// TODO: Implement search
|
||||
"/": null,
|
||||
"?": null,
|
||||
"#": null,
|
||||
"*": null,
|
||||
"n": null,
|
||||
"shift-n": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "os != macos && Editor && edit_prediction_conflict",
|
||||
"bindings": {
|
||||
|
||||
@@ -17,13 +17,13 @@ You are a highly skilled software engineer with extensive knowledge in many prog
|
||||
4. Use only the tools that are currently available.
|
||||
5. DO NOT use a tool that is not available just because it appears in the conversation. This means the user turned it off.
|
||||
6. NEVER run commands that don't terminate on their own such as web servers (like `npm run start`, `npm run dev`, `python -m http.server`, etc) or file watchers.
|
||||
7. Avoid HTML entity escaping - use plain characters instead.
|
||||
|
||||
## Searching and Reading
|
||||
|
||||
If you are unsure how to fulfill the user's request, gather more information with tool calls and/or clarifying questions.
|
||||
|
||||
{{! TODO: If there are files, we should mention it but otherwise omit that fact }}
|
||||
{{#if has_tools}}
|
||||
If appropriate, use tool calls to explore the current project, which contains the following root directories:
|
||||
|
||||
{{#each worktrees}}
|
||||
@@ -38,7 +38,6 @@ If appropriate, use tool calls to explore the current project, which contains th
|
||||
- As you learn about the structure of the project, use that information to scope `grep` searches to targeted subtrees of the project.
|
||||
- The user might specify a partial file path. If you don't know the full path, use `find_path` (not `grep`) before you read the file.
|
||||
{{/if}}
|
||||
{{/if}}
|
||||
{{else}}
|
||||
You are being tasked with providing a response, but you have no ability to use tools or to read or write any aspect of the user's system (other than any context the user might have provided to you).
|
||||
|
||||
|
||||
@@ -73,9 +73,6 @@
|
||||
"unnecessary_code_fade": 0.3,
|
||||
// Active pane styling settings.
|
||||
"active_pane_modifiers": {
|
||||
// The factor to grow the active pane by. Defaults to 1.0
|
||||
// which gives the same size as all other panes.
|
||||
"magnification": 1.0,
|
||||
// Inset border size of the active pane, in pixels.
|
||||
"border_size": 0.0,
|
||||
// Opacity of the inactive panes. 0 means transparent, 1 means opaque.
|
||||
@@ -104,9 +101,12 @@
|
||||
// The second option is decimal.
|
||||
"unit": "binary"
|
||||
},
|
||||
// The key to use for adding multiple cursors
|
||||
// Currently "alt" or "cmd_or_ctrl" (also aliased as
|
||||
// "cmd" and "ctrl") are supported.
|
||||
// Determines the modifier used to add multiple cursors with the mouse. The open-hover-link mouse gesture adapts so that it does not conflict with the multi-cursor modifier.
|
||||
//
|
||||
// 1. Maps to `Alt` on Linux and Windows and to `Option` on MacOS:
|
||||
// "alt"
|
||||
// 2. Maps `Control` on Linux and Windows and to `Command` on MacOS:
|
||||
// "cmd_or_ctrl" (alias: "cmd", "ctrl")
|
||||
"multi_cursor_modifier": "alt",
|
||||
// Whether to enable vim modes and key bindings.
|
||||
"vim_mode": false,
|
||||
@@ -128,6 +128,8 @@
|
||||
//
|
||||
// Default: true
|
||||
"restore_on_file_reopen": true,
|
||||
// Whether to automatically close files that have been deleted on disk.
|
||||
"close_on_file_delete": false,
|
||||
// Size of the drop target in the editor.
|
||||
"drop_target_size": 0.2,
|
||||
// Whether the window should be closed when using 'close active item' on a window with no tabs.
|
||||
@@ -215,6 +217,8 @@
|
||||
"show_signature_help_after_edits": false,
|
||||
// Whether to show the code action button at the start of the buffer line.
|
||||
"inline_code_actions": true,
|
||||
// Whether to allow drag and drop text selection in buffer.
|
||||
"drag_and_drop_selection": true,
|
||||
// What to do when go to definition yields no results.
|
||||
//
|
||||
// 1. Do nothing: `none`
|
||||
@@ -534,6 +538,9 @@
|
||||
"function": false
|
||||
}
|
||||
},
|
||||
// Whether to resize all the panels in a dock when resizing the dock.
|
||||
// Can be a combination of "left", "right" and "bottom".
|
||||
"resize_all_panels_in_dock": ["left"],
|
||||
"project_panel": {
|
||||
// Whether to show the project panel button in the status bar
|
||||
"button": true,
|
||||
@@ -597,7 +604,9 @@
|
||||
// 2. Never show indent guides:
|
||||
// "never"
|
||||
"show": "always"
|
||||
}
|
||||
},
|
||||
// Whether to hide the root entry when only one folder is open in the window.
|
||||
"hide_root": false
|
||||
},
|
||||
"outline_panel": {
|
||||
// Whether to show the outline panel button in the status bar
|
||||
@@ -731,13 +740,6 @@
|
||||
// The model to use.
|
||||
"model": "claude-sonnet-4"
|
||||
},
|
||||
// The model to use when applying edits from the agent.
|
||||
"editor_model": {
|
||||
// The provider to use.
|
||||
"provider": "zed.dev",
|
||||
// The model to use.
|
||||
"model": "claude-sonnet-4"
|
||||
},
|
||||
// Additional parameters for language model requests. When making a request to a model, parameters will be taken
|
||||
// from the last entry in this list that matches the model's provider and name. In each entry, both provider
|
||||
// and model are optional, so that you can specify parameters for either one.
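The list itself is not shown in this hunk. The sketch below assumes the enclosing key is named "model_parameters" and uses "temperature" purely as an illustrative parameter; only the matching rule (the last matching entry wins, with provider and model both optional) and the provider/model names come from this document:

    "model_parameters": [
      // No provider or model given, so this entry applies to every request.
      { "temperature": 0.7 },
      // A later, more specific entry overrides the general one for matching models.
      { "provider": "zed.dev", "model": "claude-sonnet-4", "temperature": 0.2 }
    ]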
|
||||
@@ -776,7 +778,6 @@
|
||||
"tools": {
|
||||
"copy_path": true,
|
||||
"create_directory": true,
|
||||
"create_file": true,
|
||||
"delete_path": true,
|
||||
"diagnostics": true,
|
||||
"edit_file": true,
|
||||
@@ -1039,6 +1040,14 @@
|
||||
"button": true,
|
||||
// Whether to show warnings or not by default.
|
||||
"include_warnings": true,
|
||||
// Settings for using LSP pull diagnostics mechanism in Zed.
|
||||
"lsp_pull_diagnostics": {
|
||||
// Whether to pull for diagnostics or not.
|
||||
"enabled": true,
|
||||
// Minimum time to wait before pulling diagnostics from the language server(s).
|
||||
// 0 turns the debounce off.
|
||||
"debounce_ms": 50
|
||||
},
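Both knobs are documented above; an override that keeps pull diagnostics enabled but debounces them more aggressively only needs the nested block (the enclosing diagnostics section is omitted here):

    "lsp_pull_diagnostics": {
      "enabled": true,
      // Wait 200 ms after edits before pulling diagnostics; 0 turns the debounce off.
      "debounce_ms": 200
    }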
|
||||
// Settings for inline diagnostics
|
||||
"inline": {
|
||||
// Whether to show diagnostics inline or not
|
||||
@@ -1462,7 +1471,9 @@
|
||||
"language_servers": ["erlang-ls", "!elp", "..."]
|
||||
},
|
||||
"Git Commit": {
|
||||
"allow_rewrap": "anywhere"
|
||||
"allow_rewrap": "anywhere",
|
||||
"soft_wrap": "editor_width",
|
||||
"preferred_line_length": 72
|
||||
},
|
||||
"Go": {
|
||||
"code_actions_on_format": {
|
||||
@@ -1505,11 +1516,11 @@
|
||||
}
|
||||
},
|
||||
"LaTeX": {
|
||||
"format_on_save": "on",
|
||||
"formatter": "language_server",
|
||||
"language_servers": ["texlab", "..."],
|
||||
"prettier": {
|
||||
"allowed": false
|
||||
"allowed": true,
|
||||
"plugins": ["prettier-plugin-latex"]
|
||||
}
|
||||
},
|
||||
"Markdown": {
|
||||
@@ -1533,19 +1544,13 @@
|
||||
"allow_rewrap": "anywhere"
|
||||
},
|
||||
"Ruby": {
|
||||
"language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "..."]
|
||||
"language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "..."]
|
||||
},
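In these lists a leading "!" excludes a server and "..." keeps the remaining defaults, so preferring ruby-lsp over solargraph is just a reordering of the same names (a sketch, not part of this diff):

    "Ruby": {
      // Prefer ruby-lsp, exclude solargraph, keep the other defaults via "...".
      "language_servers": ["ruby-lsp", "!solargraph", "..."]
    }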
|
||||
"SCSS": {
|
||||
"prettier": {
|
||||
"allowed": true
|
||||
}
|
||||
},
|
||||
"SQL": {
|
||||
"prettier": {
|
||||
"allowed": true,
|
||||
"plugins": ["prettier-plugin-sql"]
|
||||
}
|
||||
},
|
||||
"Starlark": {
|
||||
"language_servers": ["starpls", "!buck2-lsp", "..."]
|
||||
},
|
||||
@@ -1610,6 +1615,9 @@
|
||||
"version": "1",
|
||||
"api_url": "https://api.openai.com/v1"
|
||||
},
|
||||
"open_router": {
|
||||
"api_url": "https://openrouter.ai/api/v1"
|
||||
},
|
||||
"lmstudio": {
|
||||
"api_url": "http://localhost:1234/api/v0"
|
||||
},
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
// Some example tasks for common languages.
|
||||
//
|
||||
// For more documentation on how to configure debug tasks,
|
||||
// see: https://zed.dev/docs/debugger
|
||||
[
|
||||
{
|
||||
"label": "Debug active PHP file",
|
||||
|
||||
assets/settings/initial_local_debug_tasks.json (new file, 5 additions)
@@ -0,0 +1,5 @@
|
||||
// Project-local debug tasks
|
||||
//
|
||||
// For more documentation on how to configure debug tasks,
|
||||
// see: https://zed.dev/docs/debugger
|
||||
[]
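A hedged starting point for a project-local entry, borrowing only the "label" field visible in the example debug tasks file above; every adapter-specific field is intentionally left to the linked debugger docs:

    [
      {
        // Adapter-specific fields go here; see https://zed.dev/docs/debugger
        "label": "Debug active PHP file"
      }
    ]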
|
||||
@@ -261,6 +261,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#bfbdb6ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#d2a6ffff",
|
||||
"font_style": null,
|
||||
@@ -316,6 +321,16 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#d2a6ffff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#5ac1feff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#a9d94bff",
|
||||
"font_style": null,
|
||||
@@ -442,9 +457,9 @@
|
||||
"terminal.foreground": "#5c6166ff",
|
||||
"terminal.bright_foreground": "#5c6166ff",
|
||||
"terminal.dim_foreground": "#fcfcfcff",
|
||||
"terminal.ansi.black": "#fcfcfcff",
|
||||
"terminal.ansi.bright_black": "#bcbec0ff",
|
||||
"terminal.ansi.dim_black": "#5c6166ff",
|
||||
"terminal.ansi.black": "#5c6166ff",
|
||||
"terminal.ansi.bright_black": "#3b9ee5ff",
|
||||
"terminal.ansi.dim_black": "#9c9fa2ff",
|
||||
"terminal.ansi.red": "#ef7271ff",
|
||||
"terminal.ansi.bright_red": "#febab6ff",
|
||||
"terminal.ansi.dim_red": "#833538ff",
|
||||
@@ -463,9 +478,9 @@
|
||||
"terminal.ansi.cyan": "#4dbf99ff",
|
||||
"terminal.ansi.bright_cyan": "#ace0cbff",
|
||||
"terminal.ansi.dim_cyan": "#2a5f4aff",
|
||||
"terminal.ansi.white": "#5c6166ff",
|
||||
"terminal.ansi.bright_white": "#5c6166ff",
|
||||
"terminal.ansi.dim_white": "#9c9fa2ff",
|
||||
"terminal.ansi.white": "#fcfcfcff",
|
||||
"terminal.ansi.bright_white": "#fcfcfcff",
|
||||
"terminal.ansi.dim_white": "#bcbec0ff",
|
||||
"link_text.hover": "#3b9ee5ff",
|
||||
"conflict": "#f1ad49ff",
|
||||
"conflict.background": "#ffeedaff",
|
||||
@@ -632,6 +647,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#5c6166ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#a37accff",
|
||||
"font_style": null,
|
||||
@@ -687,6 +707,16 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#a37accff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#3b9ee5ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#86b300ff",
|
||||
"font_style": null,
|
||||
@@ -1003,6 +1033,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#cccac2ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#dfbfffff",
|
||||
"font_style": null,
|
||||
@@ -1058,6 +1093,16 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#dfbfffff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#72cffeff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#d4fe7fff",
|
||||
"font_style": null,
|
||||
|
||||
@@ -270,6 +270,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#d3869bff",
|
||||
"font_style": null,
|
||||
@@ -325,6 +330,16 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#fabd2eff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#b8bb25ff",
|
||||
"font_style": null,
|
||||
@@ -655,6 +670,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#d3869bff",
|
||||
"font_style": null,
|
||||
@@ -710,6 +730,16 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#fabd2eff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#b8bb25ff",
|
||||
"font_style": null,
|
||||
@@ -1040,6 +1070,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#d3869bff",
|
||||
"font_style": null,
|
||||
@@ -1095,6 +1130,16 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#fabd2eff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#b8bb25ff",
|
||||
"font_style": null,
|
||||
@@ -1227,9 +1272,9 @@
|
||||
"terminal.foreground": "#282828ff",
|
||||
"terminal.bright_foreground": "#282828ff",
|
||||
"terminal.dim_foreground": "#fbf1c7ff",
|
||||
"terminal.ansi.black": "#fbf1c7ff",
|
||||
"terminal.ansi.bright_black": "#b0a189ff",
|
||||
"terminal.ansi.dim_black": "#282828ff",
|
||||
"terminal.ansi.black": "#282828ff",
|
||||
"terminal.ansi.bright_black": "#0b6678ff",
|
||||
"terminal.ansi.dim_black": "#5f5650ff",
|
||||
"terminal.ansi.red": "#9d0308ff",
|
||||
"terminal.ansi.bright_red": "#db8b7aff",
|
||||
"terminal.ansi.dim_red": "#4e1207ff",
|
||||
@@ -1248,9 +1293,9 @@
|
||||
"terminal.ansi.cyan": "#437b59ff",
|
||||
"terminal.ansi.bright_cyan": "#9fbca8ff",
|
||||
"terminal.ansi.dim_cyan": "#253e2eff",
|
||||
"terminal.ansi.white": "#282828ff",
|
||||
"terminal.ansi.bright_white": "#282828ff",
|
||||
"terminal.ansi.dim_white": "#73675eff",
|
||||
"terminal.ansi.white": "#fbf1c7ff",
|
||||
"terminal.ansi.bright_white": "#fbf1c7ff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"link_text.hover": "#0b6678ff",
|
||||
"version_control.added": "#797410ff",
|
||||
"version_control.modified": "#b57615ff",
|
||||
@@ -1425,6 +1470,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#066578ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#8f3e71ff",
|
||||
"font_style": null,
|
||||
@@ -1480,6 +1530,16 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#b57613ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#0b6678ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#79740eff",
|
||||
"font_style": null,
|
||||
@@ -1612,9 +1672,9 @@
|
||||
"terminal.foreground": "#282828ff",
|
||||
"terminal.bright_foreground": "#282828ff",
|
||||
"terminal.dim_foreground": "#f9f5d7ff",
|
||||
"terminal.ansi.black": "#f9f5d7ff",
|
||||
"terminal.ansi.bright_black": "#b0a189ff",
|
||||
"terminal.ansi.dim_black": "#282828ff",
|
||||
"terminal.ansi.black": "#282828ff",
|
||||
"terminal.ansi.bright_black": "#73675eff",
|
||||
"terminal.ansi.dim_black": "#f9f5d7ff",
|
||||
"terminal.ansi.red": "#9d0308ff",
|
||||
"terminal.ansi.bright_red": "#db8b7aff",
|
||||
"terminal.ansi.dim_red": "#4e1207ff",
|
||||
@@ -1633,9 +1693,9 @@
|
||||
"terminal.ansi.cyan": "#437b59ff",
|
||||
"terminal.ansi.bright_cyan": "#9fbca8ff",
|
||||
"terminal.ansi.dim_cyan": "#253e2eff",
|
||||
"terminal.ansi.white": "#282828ff",
|
||||
"terminal.ansi.bright_white": "#282828ff",
|
||||
"terminal.ansi.dim_white": "#73675eff",
|
||||
"terminal.ansi.white": "#f9f5d7ff",
|
||||
"terminal.ansi.bright_white": "#f9f5d7ff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"link_text.hover": "#0b6678ff",
|
||||
"version_control.added": "#797410ff",
|
||||
"version_control.modified": "#b57615ff",
|
||||
@@ -1810,6 +1870,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#066578ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#8f3e71ff",
|
||||
"font_style": null,
|
||||
@@ -1865,6 +1930,16 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#b57613ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#0b6678ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#79740eff",
|
||||
"font_style": null,
|
||||
@@ -1997,9 +2072,9 @@
|
||||
"terminal.foreground": "#282828ff",
|
||||
"terminal.bright_foreground": "#282828ff",
|
||||
"terminal.dim_foreground": "#f2e5bcff",
|
||||
"terminal.ansi.black": "#f2e5bcff",
|
||||
"terminal.ansi.bright_black": "#b0a189ff",
|
||||
"terminal.ansi.dim_black": "#282828ff",
|
||||
"terminal.ansi.black": "#282828ff",
|
||||
"terminal.ansi.bright_black": "#73675eff",
|
||||
"terminal.ansi.dim_black": "#f2e5bcff",
|
||||
"terminal.ansi.red": "#9d0308ff",
|
||||
"terminal.ansi.bright_red": "#db8b7aff",
|
||||
"terminal.ansi.dim_red": "#4e1207ff",
|
||||
@@ -2018,9 +2093,9 @@
|
||||
"terminal.ansi.cyan": "#437b59ff",
|
||||
"terminal.ansi.bright_cyan": "#9fbca8ff",
|
||||
"terminal.ansi.dim_cyan": "#253e2eff",
|
||||
"terminal.ansi.white": "#282828ff",
|
||||
"terminal.ansi.bright_white": "#282828ff",
|
||||
"terminal.ansi.dim_white": "#73675eff",
|
||||
"terminal.ansi.white": "#f2e5bcff",
|
||||
"terminal.ansi.bright_white": "#f2e5bcff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"link_text.hover": "#0b6678ff",
|
||||
"version_control.added": "#797410ff",
|
||||
"version_control.modified": "#b57615ff",
|
||||
@@ -2195,6 +2270,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#066578ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#8f3e71ff",
|
||||
"font_style": null,
|
||||
@@ -2250,6 +2330,16 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#b57613ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#0b6678ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#79740eff",
|
||||
"font_style": null,
|
||||
|
||||
@@ -99,6 +99,8 @@
|
||||
"version_control.added": "#27a657ff",
|
||||
"version_control.modified": "#d3b020ff",
|
||||
"version_control.deleted": "#e06c76ff",
|
||||
"version_control.conflict_marker.ours": "#a1c1811a",
|
||||
"version_control.conflict_marker.theirs": "#74ade81a",
|
||||
"conflict": "#dec184ff",
|
||||
"conflict.background": "#dec1841a",
|
||||
"conflict.border": "#5d4c2fff",
|
||||
@@ -264,6 +266,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#dce0e5ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#bf956aff",
|
||||
"font_style": null,
|
||||
@@ -319,6 +326,16 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#dfc184ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#74ade8ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#a1c181ff",
|
||||
"font_style": null,
|
||||
@@ -450,9 +467,9 @@
|
||||
"terminal.foreground": "#242529ff",
|
||||
"terminal.bright_foreground": "#242529ff",
|
||||
"terminal.dim_foreground": "#fafafaff",
|
||||
"terminal.ansi.black": "#fafafaff",
|
||||
"terminal.ansi.bright_black": "#aaaaaaff",
|
||||
"terminal.ansi.dim_black": "#242529ff",
|
||||
"terminal.ansi.black": "#242529ff",
|
||||
"terminal.ansi.bright_black": "#242529ff",
|
||||
"terminal.ansi.dim_black": "#97979aff",
|
||||
"terminal.ansi.red": "#d36151ff",
|
||||
"terminal.ansi.bright_red": "#f0b0a4ff",
|
||||
"terminal.ansi.dim_red": "#6f312aff",
|
||||
@@ -471,9 +488,9 @@
|
||||
"terminal.ansi.cyan": "#3a82b7ff",
|
||||
"terminal.ansi.bright_cyan": "#a3bedaff",
|
||||
"terminal.ansi.dim_cyan": "#254058ff",
|
||||
"terminal.ansi.white": "#242529ff",
|
||||
"terminal.ansi.bright_white": "#242529ff",
|
||||
"terminal.ansi.dim_white": "#97979aff",
|
||||
"terminal.ansi.white": "#fafafaff",
|
||||
"terminal.ansi.bright_white": "#fafafaff",
|
||||
"terminal.ansi.dim_white": "#aaaaaaff",
|
||||
"link_text.hover": "#5c78e2ff",
|
||||
"version_control.added": "#27a657ff",
|
||||
"version_control.modified": "#d3b020ff",
|
||||
@@ -643,6 +660,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#242529ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#ad6e25ff",
|
||||
"font_style": null,
|
||||
@@ -698,6 +720,16 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#669f59ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#5c78e2ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#649f57ff",
|
||||
"font_style": null,
|
||||
|
||||
@@ -25,7 +25,6 @@ assistant_context_editor.workspace = true
|
||||
assistant_slash_command.workspace = true
|
||||
assistant_slash_commands.workspace = true
|
||||
assistant_tool.workspace = true
|
||||
async-watch.workspace = true
|
||||
audio.workspace = true
|
||||
buffer_diff.workspace = true
|
||||
chrono.workspace = true
|
||||
@@ -46,6 +45,7 @@ git.workspace = true
|
||||
gpui.workspace = true
|
||||
heed.workspace = true
|
||||
html_to_markdown.workspace = true
|
||||
indoc.workspace = true
|
||||
http_client.workspace = true
|
||||
indexed_docs.workspace = true
|
||||
inventory.workspace = true
|
||||
@@ -78,6 +78,7 @@ serde_json.workspace = true
|
||||
serde_json_lenient.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
sqlez.workspace = true
|
||||
streaming_diff.workspace = true
|
||||
telemetry.workspace = true
|
||||
telemetry_events.workspace = true
|
||||
@@ -93,17 +94,21 @@ ui_input.workspace = true
|
||||
urlencoding.workspace = true
|
||||
util.workspace = true
|
||||
uuid.workspace = true
|
||||
watch.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
workspace.workspace = true
|
||||
zed_actions.workspace = true
|
||||
zed_llm_client.workspace = true
|
||||
zstd.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
assistant_tools.workspace = true
|
||||
buffer_diff = { workspace = true, features = ["test-support"] }
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
gpui = { workspace = true, "features" = ["test-support"] }
|
||||
indoc.workspace = true
|
||||
language = { workspace = true, "features" = ["test-support"] }
|
||||
language_model = { workspace = true, "features" = ["test-support"] }
|
||||
pretty_assertions.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
rand.workspace = true
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
use crate::AgentPanel;
|
||||
use crate::context::{AgentContextHandle, RULES_ICON};
|
||||
use crate::context_picker::{ContextPicker, MentionLink};
|
||||
use crate::context_store::ContextStore;
|
||||
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
|
||||
use crate::message_editor::insert_message_creases;
|
||||
use crate::message_editor::{extract_message_creases, insert_message_creases};
|
||||
use crate::thread::{
|
||||
LastRestoreCheckpoint, MessageCrease, MessageId, MessageSegment, Thread, ThreadError,
|
||||
ThreadEvent, ThreadFeedback, ThreadSummary,
|
||||
@@ -13,6 +12,7 @@ use crate::tool_use::{PendingToolUseStatus, ToolUse};
|
||||
use crate::ui::{
|
||||
AddedContext, AgentNotification, AgentNotificationEvent, AnimatedLabel, ContextPill,
|
||||
};
|
||||
use crate::{AgentPanel, ModelUsageContext};
|
||||
use agent_settings::{AgentSettings, NotifyWhenAgentWaiting};
|
||||
use anyhow::Context as _;
|
||||
use assistant_tool::ToolUseStatus;
|
||||
@@ -999,7 +999,7 @@ impl ActiveThread {
|
||||
ThreadEvent::Stopped(reason) => match reason {
|
||||
Ok(StopReason::EndTurn | StopReason::MaxTokens) => {
|
||||
let used_tools = self.thread.read(cx).used_tools_since_last_user_message();
|
||||
self.play_notification_sound(cx);
|
||||
self.play_notification_sound(window, cx);
|
||||
self.show_notification(
|
||||
if used_tools {
|
||||
"Finished running tools"
|
||||
@@ -1014,9 +1014,18 @@ impl ActiveThread {
|
||||
_ => {}
|
||||
},
|
||||
ThreadEvent::ToolConfirmationNeeded => {
|
||||
self.play_notification_sound(cx);
|
||||
self.play_notification_sound(window, cx);
|
||||
self.show_notification("Waiting for tool confirmation", IconName::Info, window, cx);
|
||||
}
|
||||
ThreadEvent::ToolUseLimitReached => {
|
||||
self.play_notification_sound(window, cx);
|
||||
self.show_notification(
|
||||
"Consecutive tool use limit reached.",
|
||||
IconName::Warning,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
ThreadEvent::StreamedAssistantText(message_id, text) => {
|
||||
if let Some(rendered_message) = self.rendered_messages_by_id.get_mut(&message_id) {
|
||||
rendered_message.append_text(text, cx);
|
||||
@@ -1135,6 +1144,10 @@ impl ActiveThread {
|
||||
cx,
|
||||
);
|
||||
}
|
||||
ThreadEvent::ProfileChanged => {
|
||||
self.save_thread(cx);
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1151,9 +1164,9 @@ impl ActiveThread {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn play_notification_sound(&self, cx: &mut App) {
|
||||
fn play_notification_sound(&self, window: &Window, cx: &mut App) {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
if settings.play_sound_when_agent_done {
|
||||
if settings.play_sound_when_agent_done && !window.is_window_active() {
|
||||
Audio::play_sound(Sound::AgentDone, cx);
|
||||
}
|
||||
}
|
||||
@@ -1339,6 +1352,7 @@ impl ActiveThread {
|
||||
Some(self.text_thread_store.downgrade()),
|
||||
context_picker_menu_handle.clone(),
|
||||
SuggestContextKind::File,
|
||||
ModelUsageContext::Thread(self.thread.clone()),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -1508,31 +1522,7 @@ impl ActiveThread {
|
||||
}
|
||||
|
||||
fn paste(&mut self, _: &Paste, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
let images = cx
|
||||
.read_from_clipboard()
|
||||
.map(|item| {
|
||||
item.into_entries()
|
||||
.filter_map(|entry| {
|
||||
if let ClipboardEntry::Image(image) = entry {
|
||||
Some(image)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
if images.is_empty() {
|
||||
return;
|
||||
}
|
||||
cx.stop_propagation();
|
||||
|
||||
self.context_store.update(cx, |store, cx| {
|
||||
for image in images {
|
||||
store.add_image_instance(Arc::new(image), cx);
|
||||
}
|
||||
});
|
||||
attach_pasted_images_as_context(&self.context_store, cx);
|
||||
}
|
||||
|
||||
fn cancel_editing_message(
|
||||
@@ -1577,6 +1567,8 @@ impl ActiveThread {
|
||||
|
||||
let edited_text = state.editor.read(cx).text(cx);
|
||||
|
||||
let creases = state.editor.update(cx, extract_message_creases);
|
||||
|
||||
let new_context = self
|
||||
.context_store
|
||||
.read(cx)
|
||||
@@ -1601,6 +1593,7 @@ impl ActiveThread {
|
||||
message_id,
|
||||
Role::User,
|
||||
vec![MessageSegment::Text(edited_text)],
|
||||
creases,
|
||||
Some(context.loaded_context),
|
||||
checkpoint.ok(),
|
||||
cx,
|
||||
@@ -1795,12 +1788,31 @@ impl ActiveThread {
|
||||
|
||||
fn render_message(&self, ix: usize, window: &mut Window, cx: &mut Context<Self>) -> AnyElement {
|
||||
let message_id = self.messages[ix];
|
||||
let Some(message) = self.thread.read(cx).message(message_id) else {
|
||||
let workspace = self.workspace.clone();
|
||||
let thread = self.thread.read(cx);
|
||||
|
||||
let is_first_message = ix == 0;
|
||||
let is_last_message = ix == self.messages.len() - 1;
|
||||
|
||||
let Some(message) = thread.message(message_id) else {
|
||||
return Empty.into_any();
|
||||
};
|
||||
|
||||
let is_generating = thread.is_generating();
|
||||
let is_generating_stale = thread.is_generation_stale().unwrap_or(false);
|
||||
|
||||
let loading_dots = (is_generating && is_last_message).then(|| {
|
||||
h_flex()
|
||||
.h_8()
|
||||
.my_3()
|
||||
.mx_5()
|
||||
.when(is_generating_stale || message.is_hidden, |this| {
|
||||
this.child(AnimatedLabel::new("").size(LabelSize::Small))
|
||||
})
|
||||
});
|
||||
|
||||
if message.is_hidden {
|
||||
return Empty.into_any();
|
||||
return div().children(loading_dots).into_any();
|
||||
}
|
||||
|
||||
let message_creases = message.creases.clone();
|
||||
@@ -1809,26 +1821,16 @@ impl ActiveThread {
|
||||
return Empty.into_any();
|
||||
};
|
||||
|
||||
let workspace = self.workspace.clone();
|
||||
let thread = self.thread.read(cx);
|
||||
|
||||
// Get all the data we need from thread before we start using it in closures
|
||||
let checkpoint = thread.checkpoint_for_message(message_id);
|
||||
let configured_model = thread.configured_model().map(|m| m.model);
|
||||
let added_context = thread
|
||||
.context_for_message(message_id)
|
||||
.map(|context| AddedContext::new_attached(context, cx))
|
||||
.map(|context| AddedContext::new_attached(context, configured_model.as_ref(), cx))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let tool_uses = thread.tool_uses_for_message(message_id, cx);
|
||||
let has_tool_uses = !tool_uses.is_empty();
|
||||
let is_generating = thread.is_generating();
|
||||
let is_generating_stale = thread.is_generation_stale().unwrap_or(false);
|
||||
|
||||
let is_first_message = ix == 0;
|
||||
let is_last_message = ix == self.messages.len() - 1;
|
||||
|
||||
let loading_dots = (is_generating_stale && is_last_message)
|
||||
.then(|| AnimatedLabel::new("").size(LabelSize::Small));
|
||||
|
||||
let editing_message_state = self
|
||||
.editing_message
|
||||
@@ -2244,17 +2246,7 @@ impl ActiveThread {
|
||||
parent.child(self.render_rules_item(cx))
|
||||
})
|
||||
.child(styled_message)
|
||||
.when(is_generating && is_last_message, |this| {
|
||||
this.child(
|
||||
h_flex()
|
||||
.h_8()
|
||||
.mt_2()
|
||||
.mb_4()
|
||||
.ml_4()
|
||||
.py_1p5()
|
||||
.when_some(loading_dots, |this, loading_dots| this.child(loading_dots)),
|
||||
)
|
||||
})
|
||||
.children(loading_dots)
|
||||
.when(show_feedback, move |parent| {
|
||||
parent.child(feedback_items).when_some(
|
||||
self.open_feedback_editors.get(&message_id),
|
||||
@@ -3639,6 +3631,38 @@ pub(crate) fn open_context(
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn attach_pasted_images_as_context(
|
||||
context_store: &Entity<ContextStore>,
|
||||
cx: &mut App,
|
||||
) -> bool {
|
||||
let images = cx
|
||||
.read_from_clipboard()
|
||||
.map(|item| {
|
||||
item.into_entries()
|
||||
.filter_map(|entry| {
|
||||
if let ClipboardEntry::Image(image) = entry {
|
||||
Some(image)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
if images.is_empty() {
|
||||
return false;
|
||||
}
|
||||
cx.stop_propagation();
|
||||
|
||||
context_store.update(cx, |store, cx| {
|
||||
for image in images {
|
||||
store.add_image_instance(Arc::new(image), cx);
|
||||
}
|
||||
});
|
||||
true
|
||||
}
|
||||
|
||||
fn open_editor_at_position(
|
||||
project_path: project::ProjectPath,
|
||||
target_position: Point,
|
||||
@@ -3668,10 +3692,13 @@ fn open_editor_at_position(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use assistant_tool::{ToolRegistry, ToolWorkingSet};
|
||||
use editor::EditorSettings;
|
||||
use editor::{EditorSettings, display_map::CreaseMetadata};
|
||||
use fs::FakeFs;
|
||||
use gpui::{AppContext, TestAppContext, VisualTestContext};
|
||||
use language_model::{LanguageModel, fake_provider::FakeLanguageModel};
|
||||
use language_model::{
|
||||
ConfiguredModel, LanguageModel, LanguageModelRegistry,
|
||||
fake_provider::{FakeLanguageModel, FakeLanguageModelProvider},
|
||||
};
|
||||
use project::Project;
|
||||
use prompt_store::PromptBuilder;
|
||||
use serde_json::json;
|
||||
@@ -3732,6 +3759,87 @@ mod tests {
|
||||
assert!(!cx.read(|cx| workspace.read(cx).is_being_followed(CollaboratorId::Agent)));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_reinserting_creases_for_edited_message(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
|
||||
let (cx, active_thread, _, thread, model) =
|
||||
setup_test_environment(cx, project.clone()).await;
|
||||
cx.update(|_, cx| {
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry.set_default_model(
|
||||
Some(ConfiguredModel {
|
||||
provider: Arc::new(FakeLanguageModelProvider),
|
||||
model,
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
let creases = vec![MessageCrease {
|
||||
range: 14..22,
|
||||
metadata: CreaseMetadata {
|
||||
icon_path: "icon".into(),
|
||||
label: "foo.txt".into(),
|
||||
},
|
||||
context: None,
|
||||
}];
|
||||
|
||||
let message = thread.update(cx, |thread, cx| {
|
||||
let message_id = thread.insert_user_message(
|
||||
"Tell me about @foo.txt",
|
||||
ContextLoadResult::default(),
|
||||
None,
|
||||
creases,
|
||||
cx,
|
||||
);
|
||||
thread.message(message_id).cloned().unwrap()
|
||||
});
|
||||
|
||||
active_thread.update_in(cx, |active_thread, window, cx| {
|
||||
active_thread.start_editing_message(
|
||||
message.id,
|
||||
message.segments.as_slice(),
|
||||
message.creases.as_slice(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
let editor = active_thread
|
||||
.editing_message
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.1
|
||||
.editor
|
||||
.clone();
|
||||
editor.update(cx, |editor, cx| editor.edit([(0..13, "modified")], cx));
|
||||
active_thread.confirm_editing_message(&Default::default(), window, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
let message = thread.update(cx, |thread, _| thread.message(message.id).cloned().unwrap());
|
||||
active_thread.update_in(cx, |active_thread, window, cx| {
|
||||
active_thread.start_editing_message(
|
||||
message.id,
|
||||
message.segments.as_slice(),
|
||||
message.creases.as_slice(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
let editor = active_thread
|
||||
.editing_message
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.1
|
||||
.editor
|
||||
.clone();
|
||||
let text = editor.update(cx, |editor, cx| editor.text(cx));
|
||||
assert_eq!(text, "modified @foo.txt");
|
||||
});
|
||||
}
|
||||
|
||||
fn init_test_settings(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
|
||||
@@ -3,6 +3,7 @@ mod agent_configuration;
|
||||
mod agent_diff;
|
||||
mod agent_model_selector;
|
||||
mod agent_panel;
|
||||
mod agent_profile;
|
||||
mod buffer_codegen;
|
||||
mod context;
|
||||
mod context_picker;
|
||||
@@ -33,9 +34,11 @@ use assistant_slash_command::SlashCommandRegistry;
|
||||
use client::Client;
|
||||
use feature_flags::FeatureFlagAppExt as _;
|
||||
use fs::Fs;
|
||||
use gpui::{App, actions, impl_actions};
|
||||
use gpui::{App, Entity, actions, impl_actions};
|
||||
use language::LanguageRegistry;
|
||||
use language_model::{LanguageModelId, LanguageModelProviderId, LanguageModelRegistry};
|
||||
use language_model::{
|
||||
ConfiguredModel, LanguageModel, LanguageModelId, LanguageModelProviderId, LanguageModelRegistry,
|
||||
};
|
||||
use prompt_store::PromptBuilder;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
@@ -115,6 +118,28 @@ impl ManageProfiles {
|
||||
|
||||
impl_actions!(agent, [NewThread, ManageProfiles]);
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) enum ModelUsageContext {
|
||||
Thread(Entity<Thread>),
|
||||
InlineAssistant,
|
||||
}
|
||||
|
||||
impl ModelUsageContext {
|
||||
pub fn configured_model(&self, cx: &App) -> Option<ConfiguredModel> {
|
||||
match self {
|
||||
Self::Thread(thread) => thread.read(cx).configured_model(),
|
||||
Self::InlineAssistant => {
|
||||
LanguageModelRegistry::read_global(cx).inline_assistant_model()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn language_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
|
||||
self.configured_model(cx)
|
||||
.map(|configured_model| configured_model.model)
|
||||
}
|
||||
}
|
||||
|
||||
/// Initializes the `agent` crate.
|
||||
pub fn init(
|
||||
fs: Arc<dyn Fs>,
|
||||
|
||||
@@ -12,7 +12,7 @@ use context_server::ContextServerId;
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
Action, Animation, AnimationExt as _, AnyView, App, Entity, EventEmitter, FocusHandle,
|
||||
Focusable, ScrollHandle, Subscription, pulsating_between,
|
||||
Focusable, ScrollHandle, Subscription, Transformation, percentage,
|
||||
};
|
||||
use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry};
|
||||
use project::context_server_store::{ContextServerStatus, ContextServerStore};
|
||||
@@ -475,7 +475,6 @@ impl AgentConfiguration {
|
||||
.get(&context_server_id)
|
||||
.copied()
|
||||
.unwrap_or_default();
|
||||
|
||||
let tools = tools_by_source
|
||||
.get(&ToolSource::ContextServer {
|
||||
id: context_server_id.0.clone().into(),
|
||||
@@ -484,25 +483,23 @@ impl AgentConfiguration {
|
||||
let tool_count = tools.len();
|
||||
|
||||
let border_color = cx.theme().colors().border.opacity(0.6);
|
||||
let success_color = Color::Success.color(cx);
|
||||
|
||||
let (status_indicator, tooltip_text) = match server_status {
|
||||
ContextServerStatus::Starting => (
|
||||
Indicator::dot()
|
||||
.color(Color::Success)
|
||||
Icon::new(IconName::LoadCircle)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Accent)
|
||||
.with_animation(
|
||||
SharedString::from(format!("{}-starting", context_server_id.0.clone(),)),
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.4, 1.)),
|
||||
move |this, delta| this.color(success_color.alpha(delta).into()),
|
||||
Animation::new(Duration::from_secs(3)).repeat(),
|
||||
|icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
|
||||
)
|
||||
.into_any_element(),
|
||||
"Server is starting.",
|
||||
),
|
||||
ContextServerStatus::Running => (
|
||||
Indicator::dot().color(Color::Success).into_any_element(),
|
||||
"Server is running.",
|
||||
"Server is active.",
|
||||
),
|
||||
ContextServerStatus::Error(_) => (
|
||||
Indicator::dot().color(Color::Error).into_any_element(),
|
||||
@@ -526,12 +523,11 @@ impl AgentConfiguration {
|
||||
.p_1()
|
||||
.justify_between()
|
||||
.when(
|
||||
error.is_some() || are_tools_expanded && tool_count > 1,
|
||||
error.is_some() || are_tools_expanded && tool_count >= 1,
|
||||
|element| element.border_b_1().border_color(border_color),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.child(
|
||||
Disclosure::new(
|
||||
"tool-list-disclosure",
|
||||
@@ -551,12 +547,16 @@ impl AgentConfiguration {
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id(item_id.clone())
|
||||
h_flex()
|
||||
.id(SharedString::from(format!("tooltip-{}", item_id)))
|
||||
.h_full()
|
||||
.w_3()
|
||||
.mx_1()
|
||||
.justify_center()
|
||||
.tooltip(Tooltip::text(tooltip_text))
|
||||
.child(status_indicator),
|
||||
)
|
||||
.child(Label::new(context_server_id.0.clone()).ml_0p5())
|
||||
.child(Label::new(item_id).ml_0p5().mr_1p5())
|
||||
.when(is_running, |this| {
|
||||
this.child(
|
||||
Label::new(if tool_count == 1 {
|
||||
|
||||
@@ -2,25 +2,21 @@ mod profile_modal_header;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profiles};
|
||||
use agent_settings::{AgentProfileId, AgentSettings, builtin_profiles};
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
use convert_case::{Case, Casing as _};
|
||||
use editor::Editor;
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, WeakEntity,
|
||||
prelude::*,
|
||||
};
|
||||
use settings::{Settings as _, update_settings_file};
|
||||
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, prelude::*};
|
||||
use settings::Settings as _;
|
||||
use ui::{
|
||||
KeyBinding, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry, prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{ModalView, Workspace};
|
||||
|
||||
use crate::agent_configuration::manage_profiles_modal::profile_modal_header::ProfileModalHeader;
|
||||
use crate::agent_configuration::tool_picker::{ToolPicker, ToolPickerDelegate};
|
||||
use crate::{AgentPanel, ManageProfiles, ThreadStore};
|
||||
use crate::agent_profile::AgentProfile;
|
||||
use crate::{AgentPanel, ManageProfiles};
|
||||
|
||||
use super::tool_picker::ToolPickerMode;
|
||||
|
||||
@@ -103,7 +99,6 @@ pub struct NewProfileMode {
|
||||
pub struct ManageProfilesModal {
|
||||
fs: Arc<dyn Fs>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
focus_handle: FocusHandle,
|
||||
mode: Mode,
|
||||
}
|
||||
@@ -119,9 +114,8 @@ impl ManageProfilesModal {
|
||||
let fs = workspace.app_state().fs.clone();
|
||||
let thread_store = panel.read(cx).thread_store();
|
||||
let tools = thread_store.read(cx).tools();
|
||||
let thread_store = thread_store.downgrade();
|
||||
workspace.toggle_modal(window, cx, |window, cx| {
|
||||
let mut this = Self::new(fs, tools, thread_store, window, cx);
|
||||
let mut this = Self::new(fs, tools, window, cx);
|
||||
|
||||
if let Some(profile_id) = action.customize_tools.clone() {
|
||||
this.configure_builtin_tools(profile_id, window, cx);
|
||||
@@ -136,7 +130,6 @@ impl ManageProfilesModal {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -145,7 +138,6 @@ impl ManageProfilesModal {
|
||||
Self {
|
||||
fs,
|
||||
tools,
|
||||
thread_store,
|
||||
focus_handle,
|
||||
mode: Mode::choose_profile(window, cx),
|
||||
}
|
||||
@@ -206,7 +198,6 @@ impl ManageProfilesModal {
|
||||
ToolPickerMode::McpTools,
|
||||
self.fs.clone(),
|
||||
self.tools.clone(),
|
||||
self.thread_store.clone(),
|
||||
profile_id.clone(),
|
||||
profile,
|
||||
cx,
|
||||
@@ -244,7 +235,6 @@ impl ManageProfilesModal {
|
||||
ToolPickerMode::BuiltinTools,
|
||||
self.fs.clone(),
|
||||
self.tools.clone(),
|
||||
self.thread_store.clone(),
|
||||
profile_id.clone(),
|
||||
profile,
|
||||
cx,
|
||||
@@ -270,32 +260,10 @@ impl ManageProfilesModal {
|
||||
match &self.mode {
|
||||
Mode::ChooseProfile { .. } => {}
|
||||
Mode::NewProfile(mode) => {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
|
||||
let base_profile = mode
|
||||
.base_profile_id
|
||||
.as_ref()
|
||||
.and_then(|profile_id| settings.profiles.get(profile_id).cloned());
|
||||
|
||||
let name = mode.name_editor.read(cx).text(cx);
|
||||
let profile_id = AgentProfileId(name.to_case(Case::Kebab).into());
|
||||
|
||||
let profile = AgentProfile {
|
||||
name: name.into(),
|
||||
tools: base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.tools.clone())
|
||||
.unwrap_or_default(),
|
||||
enable_all_context_servers: base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.enable_all_context_servers)
|
||||
.unwrap_or_default(),
|
||||
context_servers: base_profile
|
||||
.map(|profile| profile.context_servers)
|
||||
.unwrap_or_default(),
|
||||
};
|
||||
|
||||
self.create_profile(profile_id.clone(), profile, cx);
|
||||
let profile_id =
|
||||
AgentProfile::create(name, mode.base_profile_id.clone(), self.fs.clone(), cx);
|
||||
self.view_profile(profile_id, window, cx);
|
||||
}
|
||||
Mode::ViewProfile(_) => {}
|
||||
@@ -325,19 +293,6 @@ impl ManageProfilesModal {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn create_profile(
|
||||
&self,
|
||||
profile_id: AgentProfileId,
|
||||
profile: AgentProfile,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
update_settings_file::<AgentSettings>(self.fs.clone(), cx, {
|
||||
move |settings, _cx| {
|
||||
settings.create_profile(profile_id, profile).log_err();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl ModalView for ManageProfilesModal {}
|
||||
@@ -520,14 +475,13 @@ impl ManageProfilesModal {
|
||||
) -> impl IntoElement {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
|
||||
let profile_id = &settings.default_profile;
|
||||
let profile_name = settings
|
||||
.profiles
|
||||
.get(&mode.profile_id)
|
||||
.map(|profile| profile.name.clone())
|
||||
.unwrap_or_else(|| "Unknown".into());
|
||||
|
||||
let icon = match profile_id.as_str() {
|
||||
let icon = match mode.profile_id.as_str() {
|
||||
"write" => IconName::Pencil,
|
||||
"ask" => IconName::MessageBubbles,
|
||||
_ => IconName::UserRoundPen,
|
||||
|
||||
@@ -1,19 +1,17 @@
|
||||
use std::{collections::BTreeMap, sync::Arc};
|
||||
|
||||
use agent_settings::{
|
||||
AgentProfile, AgentProfileContent, AgentProfileId, AgentSettings, AgentSettingsContent,
|
||||
AgentProfileContent, AgentProfileId, AgentProfileSettings, AgentSettings, AgentSettingsContent,
|
||||
ContextServerPresetContent,
|
||||
};
|
||||
use assistant_tool::{ToolSource, ToolWorkingSet};
|
||||
use fs::Fs;
|
||||
use gpui::{App, Context, DismissEvent, Entity, EventEmitter, Focusable, Task, WeakEntity, Window};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use settings::{Settings as _, update_settings_file};
|
||||
use settings::update_settings_file;
|
||||
use ui::{ListItem, ListItemSpacing, prelude::*};
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::ThreadStore;
|
||||
|
||||
pub struct ToolPicker {
|
||||
picker: Entity<Picker<ToolPickerDelegate>>,
|
||||
}
|
||||
@@ -71,11 +69,10 @@ pub enum PickerItem {
|
||||
|
||||
pub struct ToolPickerDelegate {
|
||||
tool_picker: WeakEntity<ToolPicker>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
fs: Arc<dyn Fs>,
|
||||
items: Arc<Vec<PickerItem>>,
|
||||
profile_id: AgentProfileId,
|
||||
profile: AgentProfile,
|
||||
profile_settings: AgentProfileSettings,
|
||||
filtered_items: Vec<PickerItem>,
|
||||
selected_index: usize,
|
||||
mode: ToolPickerMode,
|
||||
@@ -86,20 +83,18 @@ impl ToolPickerDelegate {
|
||||
mode: ToolPickerMode,
|
||||
fs: Arc<dyn Fs>,
|
||||
tool_set: Entity<ToolWorkingSet>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
profile_id: AgentProfileId,
|
||||
profile: AgentProfile,
|
||||
profile_settings: AgentProfileSettings,
|
||||
cx: &mut Context<ToolPicker>,
|
||||
) -> Self {
|
||||
let items = Arc::new(Self::resolve_items(mode, &tool_set, cx));
|
||||
|
||||
Self {
|
||||
tool_picker: cx.entity().downgrade(),
|
||||
thread_store,
|
||||
fs,
|
||||
items,
|
||||
profile_id,
|
||||
profile,
|
||||
profile_settings,
|
||||
filtered_items: Vec::new(),
|
||||
selected_index: 0,
|
||||
mode,
|
||||
@@ -249,28 +244,31 @@ impl PickerDelegate for ToolPickerDelegate {
|
||||
};
|
||||
|
||||
let is_currently_enabled = if let Some(server_id) = server_id.clone() {
|
||||
let preset = self.profile.context_servers.entry(server_id).or_default();
|
||||
let preset = self
|
||||
.profile_settings
|
||||
.context_servers
|
||||
.entry(server_id)
|
||||
.or_default();
|
||||
let is_enabled = *preset.tools.entry(tool_name.clone()).or_default();
|
||||
*preset.tools.entry(tool_name.clone()).or_default() = !is_enabled;
|
||||
is_enabled
|
||||
} else {
|
||||
let is_enabled = *self.profile.tools.entry(tool_name.clone()).or_default();
|
||||
*self.profile.tools.entry(tool_name.clone()).or_default() = !is_enabled;
|
||||
let is_enabled = *self
|
||||
.profile_settings
|
||||
.tools
|
||||
.entry(tool_name.clone())
|
||||
.or_default();
|
||||
*self
|
||||
.profile_settings
|
||||
.tools
|
||||
.entry(tool_name.clone())
|
||||
.or_default() = !is_enabled;
|
||||
is_enabled
|
||||
};
|
||||
|
||||
let active_profile_id = &AgentSettings::get_global(cx).default_profile;
|
||||
if active_profile_id == &self.profile_id {
|
||||
self.thread_store
|
||||
.update(cx, |this, cx| {
|
||||
this.load_profile(self.profile.clone(), cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
|
||||
update_settings_file::<AgentSettings>(self.fs.clone(), cx, {
|
||||
let profile_id = self.profile_id.clone();
|
||||
let default_profile = self.profile.clone();
|
||||
let default_profile = self.profile_settings.clone();
|
||||
let server_id = server_id.clone();
|
||||
let tool_name = tool_name.clone();
|
||||
move |settings: &mut AgentSettingsContent, _cx| {
|
||||
@@ -348,14 +346,18 @@ impl PickerDelegate for ToolPickerDelegate {
|
||||
),
|
||||
PickerItem::Tool { name, server_id } => {
|
||||
let is_enabled = if let Some(server_id) = server_id {
|
||||
self.profile
|
||||
self.profile_settings
|
||||
.context_servers
|
||||
.get(server_id.as_ref())
|
||||
.and_then(|preset| preset.tools.get(name))
|
||||
.copied()
|
||||
.unwrap_or(self.profile.enable_all_context_servers)
|
||||
.unwrap_or(self.profile_settings.enable_all_context_servers)
|
||||
} else {
|
||||
self.profile.tools.get(name).copied().unwrap_or(false)
|
||||
self.profile_settings
|
||||
.tools
|
||||
.get(name)
|
||||
.copied()
|
||||
.unwrap_or(false)
|
||||
};
|
||||
|
||||
Some(
|
||||
|
||||
@@ -1086,7 +1086,7 @@ impl Render for AgentDiffToolbar {
|
||||
.child(vertical_divider())
|
||||
.when_some(editor.read(cx).workspace(), |this, _workspace| {
|
||||
this.child(
|
||||
IconButton::new("review", IconName::ListCollapse)
|
||||
IconButton::new("review", IconName::ListTodo)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(Tooltip::for_action_title_in(
|
||||
"Review All Files",
|
||||
@@ -1116,8 +1116,13 @@ impl Render for AgentDiffToolbar {
|
||||
return Empty.into_any();
|
||||
};
|
||||
|
||||
let is_generating = agent_diff.read(cx).thread.read(cx).is_generating();
|
||||
if is_generating {
|
||||
let has_pending_edit_tool_use = agent_diff
|
||||
.read(cx)
|
||||
.thread
|
||||
.read(cx)
|
||||
.has_pending_edit_tool_uses();
|
||||
|
||||
if has_pending_edit_tool_use {
|
||||
return div().px_2().child(spinner_icon).into_any();
|
||||
}
|
||||
|
||||
@@ -1372,7 +1377,9 @@ impl AgentDiff {
|
||||
| ThreadEvent::ToolFinished { .. }
|
||||
| ThreadEvent::CheckpointChanged
|
||||
| ThreadEvent::ToolConfirmationNeeded
|
||||
| ThreadEvent::CancelEditing => {}
|
||||
| ThreadEvent::ToolUseLimitReached
|
||||
| ThreadEvent::CancelEditing
|
||||
| ThreadEvent::ProfileChanged => {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1464,7 +1471,10 @@ impl AgentDiff {
|
||||
if !AgentSettings::get_global(cx).single_file_review {
|
||||
for (editor, _) in self.reviewing_editors.drain() {
|
||||
editor
|
||||
.update(cx, |editor, cx| editor.end_temporary_diff_override(cx))
|
||||
.update(cx, |editor, cx| {
|
||||
editor.end_temporary_diff_override(cx);
|
||||
editor.unregister_addon::<EditorAgentDiffAddon>();
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
return;
|
||||
@@ -1503,7 +1513,7 @@ impl AgentDiff {
|
||||
multibuffer.add_diff(diff_handle.clone(), cx);
|
||||
});
|
||||
|
||||
let new_state = if thread.read(cx).is_generating() {
|
||||
let new_state = if thread.read(cx).has_pending_edit_tool_uses() {
|
||||
EditorState::Generating
|
||||
} else {
|
||||
EditorState::Reviewing
|
||||
@@ -1560,7 +1570,10 @@ impl AgentDiff {
|
||||
|
||||
if in_workspace {
|
||||
editor
|
||||
.update(cx, |editor, cx| editor.end_temporary_diff_override(cx))
|
||||
.update(cx, |editor, cx| {
|
||||
editor.end_temporary_diff_override(cx);
|
||||
editor.unregister_addon::<EditorAgentDiffAddon>();
|
||||
})
|
||||
.ok();
|
||||
self.reviewing_editors.remove(&editor);
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ use fs::Fs;
|
||||
use gpui::{Entity, FocusHandle, SharedString};
|
||||
use picker::popover_menu::PickerPopoverMenu;
|
||||
|
||||
use crate::Thread;
|
||||
use crate::ModelUsageContext;
|
||||
use assistant_context_editor::language_model_selector::{
|
||||
LanguageModelSelector, ToggleModelSelector, language_model_selector,
|
||||
};
|
||||
@@ -12,12 +12,6 @@ use settings::update_settings_file;
|
||||
use std::sync::Arc;
|
||||
use ui::{PopoverMenuHandle, Tooltip, prelude::*};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum ModelType {
|
||||
Default(Entity<Thread>),
|
||||
InlineAssistant,
|
||||
}
|
||||
|
||||
pub struct AgentModelSelector {
|
||||
selector: Entity<LanguageModelSelector>,
|
||||
menu_handle: PopoverMenuHandle<LanguageModelSelector>,
|
||||
@@ -29,7 +23,7 @@ impl AgentModelSelector {
|
||||
fs: Arc<dyn Fs>,
|
||||
menu_handle: PopoverMenuHandle<LanguageModelSelector>,
|
||||
focus_handle: FocusHandle,
|
||||
model_type: ModelType,
|
||||
model_usage_context: ModelUsageContext,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -38,19 +32,14 @@ impl AgentModelSelector {
|
||||
let fs = fs.clone();
|
||||
language_model_selector(
|
||||
{
|
||||
let model_type = model_type.clone();
|
||||
move |cx| match &model_type {
|
||||
ModelType::Default(thread) => thread.read(cx).configured_model(),
|
||||
ModelType::InlineAssistant => {
|
||||
LanguageModelRegistry::read_global(cx).inline_assistant_model()
|
||||
}
|
||||
}
|
||||
let model_context = model_usage_context.clone();
|
||||
move |cx| model_context.configured_model(cx)
|
||||
},
|
||||
move |model, cx| {
|
||||
let provider = model.provider_id().0.to_string();
|
||||
let model_id = model.id().0.to_string();
|
||||
match &model_type {
|
||||
ModelType::Default(thread) => {
|
||||
match &model_usage_context {
|
||||
ModelUsageContext::Thread(thread) => {
|
||||
thread.update(cx, |thread, cx| {
|
||||
let registry = LanguageModelRegistry::read_global(cx);
|
||||
if let Some(provider) = registry.provider(&model.provider_id())
|
||||
@@ -72,7 +61,7 @@ impl AgentModelSelector {
|
||||
},
|
||||
);
|
||||
}
|
||||
ModelType::InlineAssistant => {
|
||||
ModelUsageContext::InlineAssistant => {
|
||||
update_settings_file::<AgentSettings>(
|
||||
fs.clone(),
|
||||
cx,
|
||||
|
||||
@@ -57,7 +57,7 @@ use zed_llm_client::{CompletionIntent, UsageLimit};
|
||||
use crate::active_thread::{self, ActiveThread, ActiveThreadEvent};
|
||||
use crate::agent_configuration::{AgentConfiguration, AssistantConfigurationEvent};
|
||||
use crate::agent_diff::AgentDiff;
|
||||
use crate::history_store::{HistoryStore, RecentEntry};
|
||||
use crate::history_store::{HistoryEntryId, HistoryStore};
|
||||
use crate::message_editor::{MessageEditor, MessageEditorEvent};
|
||||
use crate::thread::{Thread, ThreadError, ThreadId, ThreadSummary, TokenUsageRatio};
|
||||
use crate::thread_history::{HistoryEntryElement, ThreadHistory};
|
||||
@@ -257,6 +257,7 @@ impl ActiveView {
|
||||
|
||||
pub fn prompt_editor(
|
||||
context_editor: Entity<ContextEditor>,
|
||||
history_store: Entity<HistoryStore>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
@@ -322,6 +323,19 @@ impl ActiveView {
|
||||
editor.set_text(summary, window, cx);
|
||||
})
|
||||
}
|
||||
ContextEvent::PathChanged { old_path, new_path } => {
|
||||
history_store.update(cx, |history_store, cx| {
|
||||
if let Some(old_path) = old_path {
|
||||
history_store
|
||||
.replace_recently_opened_text_thread(old_path, new_path, cx);
|
||||
} else {
|
||||
history_store.push_recently_opened_entry(
|
||||
HistoryEntryId::Context(new_path.clone()),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}),
|
||||
@@ -516,8 +530,7 @@ impl AgentPanel {
|
||||
HistoryStore::new(
|
||||
thread_store.clone(),
|
||||
context_store.clone(),
|
||||
[RecentEntry::Thread(thread_id, thread.clone())],
|
||||
window,
|
||||
[HistoryEntryId::Thread(thread_id)],
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -544,7 +557,13 @@ impl AgentPanel {
|
||||
editor.insert_default_prompt(window, cx);
|
||||
editor
|
||||
});
|
||||
ActiveView::prompt_editor(context_editor, language_registry.clone(), window, cx)
|
||||
ActiveView::prompt_editor(
|
||||
context_editor,
|
||||
history_store.clone(),
|
||||
language_registry.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
@@ -581,86 +600,9 @@ impl AgentPanel {
|
||||
let panel = weak_panel.clone();
|
||||
let assistant_navigation_menu =
|
||||
ContextMenu::build_persistent(window, cx, move |mut menu, _window, cx| {
|
||||
let recently_opened = panel
|
||||
.update(cx, |this, cx| {
|
||||
this.history_store.update(cx, |history_store, cx| {
|
||||
history_store.recently_opened_entries(cx)
|
||||
})
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
if !recently_opened.is_empty() {
|
||||
menu = menu.header("Recently Opened");
|
||||
|
||||
for entry in recently_opened.iter() {
|
||||
if let RecentEntry::Context(context) = entry {
|
||||
if context.read(cx).path().is_none() {
|
||||
log::error!(
|
||||
"bug: text thread in recent history list was never saved"
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let summary = entry.summary(cx);
|
||||
|
||||
menu = menu.entry_with_end_slot_on_hover(
|
||||
summary,
|
||||
None,
|
||||
{
|
||||
let panel = panel.clone();
|
||||
let entry = entry.clone();
|
||||
move |window, cx| {
|
||||
panel
|
||||
.update(cx, {
|
||||
let entry = entry.clone();
|
||||
move |this, cx| match entry {
|
||||
RecentEntry::Thread(_, thread) => {
|
||||
this.open_thread(thread, window, cx)
|
||||
}
|
||||
RecentEntry::Context(context) => {
|
||||
let Some(path) = context.read(cx).path()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
this.open_saved_prompt_editor(
|
||||
path.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.detach_and_log_err(cx)
|
||||
}
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
},
|
||||
IconName::Close,
|
||||
"Close Entry".into(),
|
||||
{
|
||||
let panel = panel.clone();
|
||||
let entry = entry.clone();
|
||||
move |_window, cx| {
|
||||
panel
|
||||
.update(cx, |this, cx| {
|
||||
this.history_store.update(
|
||||
cx,
|
||||
|history_store, cx| {
|
||||
history_store.remove_recently_opened_entry(
|
||||
&entry, cx,
|
||||
);
|
||||
},
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
menu = menu.separator();
|
||||
if let Some(panel) = panel.upgrade() {
|
||||
menu = Self::populate_recently_opened_menu_section(menu, panel, cx);
|
||||
}
|
||||
|
||||
menu.action("View All", Box::new(OpenHistory))
|
||||
.end_slot_action(DeleteRecentlyOpenThread.boxed_clone())
|
||||
.fixed_width(px(320.).into())
|
||||
@@ -898,6 +840,7 @@ impl AgentPanel {
|
||||
self.set_active_view(
|
||||
ActiveView::prompt_editor(
|
||||
context_editor.clone(),
|
||||
self.history_store.clone(),
|
||||
self.language_registry.clone(),
|
||||
window,
|
||||
cx,
|
||||
@@ -984,7 +927,13 @@ impl AgentPanel {
|
||||
)
|
||||
});
|
||||
self.set_active_view(
|
||||
ActiveView::prompt_editor(editor.clone(), self.language_registry.clone(), window, cx),
|
||||
ActiveView::prompt_editor(
|
||||
editor.clone(),
|
||||
self.history_store.clone(),
|
||||
self.language_registry.clone(),
|
||||
window,
|
||||
cx,
|
||||
),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -1383,16 +1332,6 @@ impl AgentPanel {
|
||||
}
|
||||
}
|
||||
}
|
||||
ActiveView::TextThread { context_editor, .. } => {
|
||||
let context = context_editor.read(cx).context();
|
||||
// When switching away from an unsaved text thread, delete its entry.
|
||||
if context.read(cx).path().is_none() {
|
||||
let context = context.clone();
|
||||
self.history_store.update(cx, |store, cx| {
|
||||
store.remove_recently_opened_entry(&RecentEntry::Context(context), cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
@@ -1400,13 +1339,14 @@ impl AgentPanel {
|
||||
ActiveView::Thread { thread, .. } => self.history_store.update(cx, |store, cx| {
|
||||
if let Some(thread) = thread.upgrade() {
|
||||
let id = thread.read(cx).id().clone();
|
||||
store.push_recently_opened_entry(RecentEntry::Thread(id, thread), cx);
|
||||
store.push_recently_opened_entry(HistoryEntryId::Thread(id), cx);
|
||||
}
|
||||
}),
|
||||
ActiveView::TextThread { context_editor, .. } => {
|
||||
self.history_store.update(cx, |store, cx| {
|
||||
let context = context_editor.read(cx).context().clone();
|
||||
store.push_recently_opened_entry(RecentEntry::Context(context), cx)
|
||||
if let Some(path) = context_editor.read(cx).context().read(cx).path() {
|
||||
store.push_recently_opened_entry(HistoryEntryId::Context(path.clone()), cx)
|
||||
}
|
||||
})
|
||||
}
|
||||
_ => {}
|
||||
@@ -1425,6 +1365,70 @@ impl AgentPanel {
|
||||
|
||||
self.focus_handle(cx).focus(window);
|
||||
}
|
||||
|
||||
fn populate_recently_opened_menu_section(
|
||||
mut menu: ContextMenu,
|
||||
panel: Entity<Self>,
|
||||
cx: &mut Context<ContextMenu>,
|
||||
) -> ContextMenu {
|
||||
let entries = panel
|
||||
.read(cx)
|
||||
.history_store
|
||||
.read(cx)
|
||||
.recently_opened_entries(cx);
|
||||
|
||||
if entries.is_empty() {
|
||||
return menu;
|
||||
}
|
||||
|
||||
menu = menu.header("Recently Opened");
|
||||
|
||||
for entry in entries {
|
||||
let title = entry.title().clone();
|
||||
let id = entry.id();
|
||||
|
||||
menu = menu.entry_with_end_slot_on_hover(
|
||||
title,
|
||||
None,
|
||||
{
|
||||
let panel = panel.downgrade();
|
||||
let id = id.clone();
|
||||
move |window, cx| {
|
||||
let id = id.clone();
|
||||
panel
|
||||
.update(cx, move |this, cx| match id {
|
||||
HistoryEntryId::Thread(id) => this
|
||||
.open_thread_by_id(&id, window, cx)
|
||||
.detach_and_log_err(cx),
|
||||
HistoryEntryId::Context(path) => this
|
||||
.open_saved_prompt_editor(path.clone(), window, cx)
|
||||
.detach_and_log_err(cx),
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
},
|
||||
IconName::Close,
|
||||
"Close Entry".into(),
|
||||
{
|
||||
let panel = panel.downgrade();
|
||||
let id = id.clone();
|
||||
move |_window, cx| {
|
||||
panel
|
||||
.update(cx, |this, cx| {
|
||||
this.history_store.update(cx, |history_store, cx| {
|
||||
history_store.remove_recently_opened_entry(&id, cx);
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
menu = menu.separator();
|
||||
|
||||
menu
|
||||
}
|
||||
}
|
||||
|
||||
impl Focusable for AgentPanel {
|
||||
|
||||
crates/agent/src/agent_profile.rs (new file, 334 lines)
@@ -0,0 +1,334 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use agent_settings::{AgentProfileId, AgentProfileSettings, AgentSettings};
|
||||
use assistant_tool::{Tool, ToolSource, ToolWorkingSet};
|
||||
use collections::IndexMap;
|
||||
use convert_case::{Case, Casing};
|
||||
use fs::Fs;
|
||||
use gpui::{App, Entity};
|
||||
use settings::{Settings, update_settings_file};
|
||||
use ui::SharedString;
|
||||
use util::ResultExt;
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct AgentProfile {
|
||||
id: AgentProfileId,
|
||||
tool_set: Entity<ToolWorkingSet>,
|
||||
}
|
||||
|
||||
pub type AvailableProfiles = IndexMap<AgentProfileId, SharedString>;
|
||||
|
||||
impl AgentProfile {
|
||||
pub fn new(id: AgentProfileId, tool_set: Entity<ToolWorkingSet>) -> Self {
|
||||
Self { id, tool_set }
|
||||
}
|
||||
|
||||
/// Saves a new profile to the settings.
|
||||
pub fn create(
|
||||
name: String,
|
||||
base_profile_id: Option<AgentProfileId>,
|
||||
fs: Arc<dyn Fs>,
|
||||
cx: &App,
|
||||
) -> AgentProfileId {
|
||||
let id = AgentProfileId(name.to_case(Case::Kebab).into());
|
||||
|
||||
let base_profile =
|
||||
base_profile_id.and_then(|id| AgentSettings::get_global(cx).profiles.get(&id).cloned());
|
||||
|
||||
let profile_settings = AgentProfileSettings {
|
||||
name: name.into(),
|
||||
tools: base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.tools.clone())
|
||||
.unwrap_or_default(),
|
||||
enable_all_context_servers: base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.enable_all_context_servers)
|
||||
.unwrap_or_default(),
|
||||
context_servers: base_profile
|
||||
.map(|profile| profile.context_servers)
|
||||
.unwrap_or_default(),
|
||||
};
|
||||
|
||||
update_settings_file::<AgentSettings>(fs, cx, {
|
||||
let id = id.clone();
|
||||
move |settings, _cx| {
|
||||
settings.create_profile(id, profile_settings).log_err();
|
||||
}
|
||||
});
|
||||
|
||||
id
|
||||
}
|
||||
|
||||
/// Returns a map of `AgentProfileId`s to their names.
|
||||
pub fn available_profiles(cx: &App) -> AvailableProfiles {
|
||||
let mut profiles = AvailableProfiles::default();
|
||||
for (id, profile) in AgentSettings::get_global(cx).profiles.iter() {
|
||||
profiles.insert(id.clone(), profile.name.clone());
|
||||
}
|
||||
profiles
|
||||
}
|
||||
|
||||
pub fn id(&self) -> &AgentProfileId {
|
||||
&self.id
|
||||
}
|
||||
|
||||
pub fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
|
||||
let Some(settings) = AgentSettings::get_global(cx).profiles.get(&self.id) else {
|
||||
return Vec::new();
|
||||
};
|
||||
|
||||
self.tool_set
|
||||
.read(cx)
|
||||
.tools(cx)
|
||||
.into_iter()
|
||||
.filter(|tool| Self::is_enabled(settings, tool.source(), tool.name()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn is_enabled(settings: &AgentProfileSettings, source: ToolSource, name: String) -> bool {
|
||||
match source {
|
||||
ToolSource::Native => *settings.tools.get(name.as_str()).unwrap_or(&false),
|
||||
ToolSource::ContextServer { id } => {
|
||||
if settings.enable_all_context_servers {
|
||||
return true;
|
||||
}
|
||||
|
||||
let Some(preset) = settings.context_servers.get(id.as_ref()) else {
|
||||
return false;
|
||||
};
|
||||
*preset.tools.get(name.as_str()).unwrap_or(&false)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use agent_settings::ContextServerPreset;
|
||||
use assistant_tool::ToolRegistry;
|
||||
use collections::IndexMap;
|
||||
use gpui::{AppContext, TestAppContext};
|
||||
use http_client::FakeHttpClient;
|
||||
use project::Project;
|
||||
use settings::{Settings, SettingsStore};
|
||||
use ui::SharedString;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_enabled_built_in_tools_for_profile(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
let id = AgentProfileId::default();
|
||||
let profile_settings = cx.read(|cx| {
|
||||
AgentSettings::get_global(cx)
|
||||
.profiles
|
||||
.get(&id)
|
||||
.unwrap()
|
||||
.clone()
|
||||
});
|
||||
let tool_set = default_tool_set(cx);
|
||||
|
||||
let profile = AgentProfile::new(id.clone(), tool_set);
|
||||
|
||||
let mut enabled_tools = cx
|
||||
.read(|cx| profile.enabled_tools(cx))
|
||||
.into_iter()
|
||||
.map(|tool| tool.name())
|
||||
.collect::<Vec<_>>();
|
||||
enabled_tools.sort();
|
||||
|
||||
let mut expected_tools = profile_settings
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then_some(tool.to_string()))
|
||||
// Provider dependent
|
||||
.filter(|tool| tool != "web_search")
|
||||
.collect::<Vec<_>>();
|
||||
// Plus all registered MCP tools
|
||||
expected_tools.extend(["enabled_mcp_tool".into(), "disabled_mcp_tool".into()]);
|
||||
expected_tools.sort();
|
||||
|
||||
assert_eq!(enabled_tools, expected_tools);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_custom_mcp_settings(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
let id = AgentProfileId("custom_mcp".into());
|
||||
let profile_settings = cx.read(|cx| {
|
||||
AgentSettings::get_global(cx)
|
||||
.profiles
|
||||
.get(&id)
|
||||
.unwrap()
|
||||
.clone()
|
||||
});
|
||||
let tool_set = default_tool_set(cx);
|
||||
|
||||
let profile = AgentProfile::new(id.clone(), tool_set);
|
||||
|
||||
let mut enabled_tools = cx
|
||||
.read(|cx| profile.enabled_tools(cx))
|
||||
.into_iter()
|
||||
.map(|tool| tool.name())
|
||||
.collect::<Vec<_>>();
|
||||
enabled_tools.sort();
|
||||
|
||||
let mut expected_tools = profile_settings.context_servers["mcp"]
|
||||
.tools
|
||||
.iter()
|
||||
.filter_map(|(key, enabled)| enabled.then(|| key.to_string()))
|
||||
.collect::<Vec<_>>();
|
||||
expected_tools.sort();
|
||||
|
||||
assert_eq!(enabled_tools, expected_tools);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_only_built_in(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
let id = AgentProfileId("write_minus_mcp".into());
|
||||
let profile_settings = cx.read(|cx| {
|
||||
AgentSettings::get_global(cx)
|
||||
.profiles
|
||||
.get(&id)
|
||||
.unwrap()
|
||||
.clone()
|
||||
});
|
||||
let tool_set = default_tool_set(cx);
|
||||
|
||||
let profile = AgentProfile::new(id.clone(), tool_set);
|
||||
|
||||
let mut enabled_tools = cx
|
||||
.read(|cx| profile.enabled_tools(cx))
|
||||
.into_iter()
|
||||
.map(|tool| tool.name())
|
||||
.collect::<Vec<_>>();
|
||||
enabled_tools.sort();
|
||||
|
||||
let mut expected_tools = profile_settings
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then_some(tool.to_string()))
|
||||
// Provider dependent
|
||||
.filter(|tool| tool != "web_search")
|
||||
.collect::<Vec<_>>();
|
||||
expected_tools.sort();
|
||||
|
||||
assert_eq!(enabled_tools, expected_tools);
|
||||
}
|
||||
|
||||
fn init_test_settings(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
language_model::init_settings(cx);
|
||||
ToolRegistry::default_global(cx);
|
||||
assistant_tools::init(FakeHttpClient::with_404_response(), cx);
|
||||
});
|
||||
|
||||
cx.update(|cx| {
|
||||
let mut agent_settings = AgentSettings::get_global(cx).clone();
|
||||
agent_settings.profiles.insert(
|
||||
AgentProfileId("write_minus_mcp".into()),
|
||||
AgentProfileSettings {
|
||||
name: "write_minus_mcp".into(),
|
||||
enable_all_context_servers: false,
|
||||
..agent_settings.profiles[&AgentProfileId::default()].clone()
|
||||
},
|
||||
);
|
||||
agent_settings.profiles.insert(
|
||||
AgentProfileId("custom_mcp".into()),
|
||||
AgentProfileSettings {
|
||||
name: "mcp".into(),
|
||||
tools: IndexMap::default(),
|
||||
enable_all_context_servers: false,
|
||||
context_servers: IndexMap::from_iter([("mcp".into(), context_server_preset())]),
|
||||
},
|
||||
);
|
||||
AgentSettings::override_global(agent_settings, cx);
|
||||
})
|
||||
}
|
||||
|
||||
fn context_server_preset() -> ContextServerPreset {
|
||||
ContextServerPreset {
|
||||
tools: IndexMap::from_iter([
|
||||
("enabled_mcp_tool".into(), true),
|
||||
("disabled_mcp_tool".into(), false),
|
||||
]),
|
||||
}
|
||||
}
|
||||
|
||||
fn default_tool_set(cx: &mut TestAppContext) -> Entity<ToolWorkingSet> {
|
||||
cx.new(|_| {
|
||||
let mut tool_set = ToolWorkingSet::default();
|
||||
tool_set.insert(Arc::new(FakeTool::new("enabled_mcp_tool", "mcp")));
|
||||
tool_set.insert(Arc::new(FakeTool::new("disabled_mcp_tool", "mcp")));
|
||||
tool_set
|
||||
})
|
||||
}
|
||||
|
||||
struct FakeTool {
|
||||
name: String,
|
||||
source: SharedString,
|
||||
}
|
||||
|
||||
impl FakeTool {
|
||||
fn new(name: impl Into<String>, source: impl Into<SharedString>) -> Self {
|
||||
Self {
|
||||
name: name.into(),
|
||||
source: source.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Tool for FakeTool {
|
||||
fn name(&self) -> String {
|
||||
self.name.clone()
|
||||
}
|
||||
|
||||
fn source(&self) -> ToolSource {
|
||||
ToolSource::ContextServer {
|
||||
id: self.source.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn icon(&self) -> ui::IconName {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn needs_confirmation(&self, _input: &serde_json::Value, _cx: &App) -> bool {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn ui_text(&self, _input: &serde_json::Value) -> String {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_input: serde_json::Value,
|
||||
_request: Arc<language_model::LanguageModelRequest>,
|
||||
_project: Entity<Project>,
|
||||
_action_log: Entity<assistant_tool::ActionLog>,
|
||||
_model: Arc<dyn language_model::LanguageModel>,
|
||||
_window: Option<gpui::AnyWindowHandle>,
|
||||
_cx: &mut App,
|
||||
) -> assistant_tool::ToolResult {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn may_perform_edits(&self) -> bool {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
}
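
A rough sketch of how the new AgentProfile API above could be used from panel code. This is illustrative only: the `fs`, `tool_set`, and `cx` values are assumed to come from the caller and are not part of this diff.

// Create a profile derived from the built-in "write" profile, persist it to
// settings, then ask it which tools are currently enabled.
let profile_id = AgentProfile::create(
    "My Custom Profile".to_string(),       // display name; the id is its kebab-case form
    Some(AgentProfileId("write".into())),  // optional base profile to copy settings from
    fs.clone(),                            // Arc<dyn Fs> used by update_settings_file
    cx,
);
let profile = AgentProfile::new(profile_id, tool_set.clone());
let enabled_tools = profile.enabled_tools(cx); // Vec<Arc<dyn Tool>> allowed by this profile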
@@ -386,8 +386,10 @@ impl CodegenAlternative {
|
||||
async { Ok(LanguageModelTextStream::default()) }.boxed_local()
|
||||
} else {
|
||||
let request = self.build_request(&model, user_prompt, cx)?;
|
||||
cx.spawn(async move |_, cx| model.stream_completion_text(request.await, &cx).await)
|
||||
.boxed_local()
|
||||
cx.spawn(async move |_, cx| {
|
||||
Ok(model.stream_completion_text(request.await, &cx).await?)
|
||||
})
|
||||
.boxed_local()
|
||||
};
|
||||
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
|
||||
Ok(())
|
||||
|
||||
@@ -734,6 +734,7 @@ impl Display for RulesContext {
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ImageContext {
|
||||
pub project_path: Option<ProjectPath>,
|
||||
pub full_path: Option<Arc<Path>>,
|
||||
pub original_image: Arc<gpui::Image>,
|
||||
// TODO: handle this elsewhere and remove `ignore-interior-mutability` opt-out in clippy.toml
|
||||
// needed due to a false positive of `clippy::mutable_key_type`.
|
||||
@@ -744,6 +745,7 @@ pub struct ImageContext {
|
||||
pub enum ImageStatus {
|
||||
Loading,
|
||||
Error,
|
||||
Warning,
|
||||
Ready,
|
||||
}
|
||||
|
||||
@@ -760,11 +762,17 @@ impl ImageContext {
|
||||
self.image_task.clone().now_or_never().flatten()
|
||||
}
|
||||
|
||||
pub fn status(&self) -> ImageStatus {
|
||||
pub fn status(&self, model: Option<&Arc<dyn language_model::LanguageModel>>) -> ImageStatus {
|
||||
match self.image_task.clone().now_or_never() {
|
||||
None => ImageStatus::Loading,
|
||||
Some(None) => ImageStatus::Error,
|
||||
Some(Some(_)) => ImageStatus::Ready,
|
||||
Some(Some(_)) => {
|
||||
if model.is_some_and(|model| !model.supports_images()) {
|
||||
ImageStatus::Warning
|
||||
} else {
|
||||
ImageStatus::Ready
|
||||
}
|
||||
}
|
||||
}
|
||||
}
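
A minimal sketch of consuming the new model-aware status. The `configured_model` lookup, its `model` field, and the `image_context` value are assumptions for illustration and are not shown in this hunk:

// Warn in the UI when the selected model cannot accept image attachments.
let model = thread.read(cx).configured_model().map(|configured| configured.model);
match image_context.status(model.as_ref()) {
    ImageStatus::Warning => { /* e.g. show an "unsupported by this model" indicator */ }
    ImageStatus::Loading | ImageStatus::Error | ImageStatus::Ready => {}
}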
@@ -1,7 +1,5 @@
|
||||
use std::cell::RefCell;
|
||||
use std::ops::Range;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
@@ -14,7 +12,7 @@ use http_client::HttpClientWithUrl;
|
||||
use itertools::Itertools;
|
||||
use language::{Buffer, CodeLabel, HighlightId};
|
||||
use lsp::CompletionContext;
|
||||
use project::{Completion, CompletionIntent, ProjectPath, Symbol, WorktreeId};
|
||||
use project::{Completion, CompletionIntent, CompletionResponse, ProjectPath, Symbol, WorktreeId};
|
||||
use prompt_store::PromptStore;
|
||||
use rope::Point;
|
||||
use text::{Anchor, OffsetRangeExt, ToPoint};
|
||||
@@ -746,7 +744,7 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
_trigger: CompletionContext,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
) -> Task<Result<Vec<CompletionResponse>>> {
|
||||
let state = buffer.update(cx, |buffer, _cx| {
|
||||
let position = buffer_position.to_point(buffer);
|
||||
let line_start = Point::new(position.row, 0);
|
||||
@@ -756,18 +754,18 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
MentionCompletion::try_parse(line, offset_to_line)
|
||||
});
|
||||
let Some(state) = state else {
|
||||
return Task::ready(Ok(None));
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
};
|
||||
|
||||
let Some((workspace, context_store)) =
|
||||
self.workspace.upgrade().zip(self.context_store.upgrade())
|
||||
else {
|
||||
return Task::ready(Ok(None));
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
};
|
||||
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let source_range = snapshot.anchor_before(state.source_range.start)
|
||||
..snapshot.anchor_before(state.source_range.end);
|
||||
..snapshot.anchor_after(state.source_range.end);
|
||||
|
||||
let thread_store = self.thread_store.clone();
|
||||
let text_thread_store = self.text_thread_store.clone();
|
||||
@@ -815,10 +813,10 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
cx.spawn(async move |_, cx| {
|
||||
let matches = search_task.await;
|
||||
let Some(editor) = editor.upgrade() else {
|
||||
return Ok(None);
|
||||
return Ok(Vec::new());
|
||||
};
|
||||
|
||||
Ok(Some(cx.update(|cx| {
|
||||
let completions = cx.update(|cx| {
|
||||
matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| match mat {
|
||||
@@ -901,26 +899,24 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
),
|
||||
})
|
||||
.collect()
|
||||
})?))
|
||||
})
|
||||
}
|
||||
})?;
|
||||
|
||||
fn resolve_completions(
|
||||
&self,
|
||||
_buffer: Entity<Buffer>,
|
||||
_completion_indices: Vec<usize>,
|
||||
_completions: Rc<RefCell<Box<[Completion]>>>,
|
||||
_cx: &mut Context<Editor>,
|
||||
) -> Task<Result<bool>> {
|
||||
Task::ready(Ok(true))
|
||||
Ok(vec![CompletionResponse {
|
||||
completions,
|
||||
// Since this provider does its own filtering (`filter_completions()` returns false),
// there is no benefit to computing whether this set of completions is incomplete.
|
||||
is_incomplete: true,
|
||||
}])
|
||||
})
|
||||
}
|
||||
|
||||
fn is_completion_trigger(
|
||||
&self,
|
||||
buffer: &Entity<language::Buffer>,
|
||||
position: language::Anchor,
|
||||
_: &str,
|
||||
_: bool,
|
||||
_text: &str,
|
||||
_trigger_in_words: bool,
|
||||
_menu_is_open: bool,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> bool {
|
||||
let buffer = buffer.read(cx);
|
||||
@@ -1069,7 +1065,7 @@ mod tests {
|
||||
use project::{Project, ProjectPath};
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::ops::Deref;
|
||||
use std::{ops::Deref, rc::Rc};
|
||||
use util::{path, separator};
|
||||
use workspace::{AppState, Item};
|
||||
|
||||
|
||||
@@ -282,15 +282,18 @@ pub fn unordered_thread_entries(
|
||||
text_thread_store: Entity<TextThreadStore>,
|
||||
cx: &App,
|
||||
) -> impl Iterator<Item = (DateTime<Utc>, ThreadContextEntry)> {
|
||||
let threads = thread_store.read(cx).unordered_threads().map(|thread| {
|
||||
(
|
||||
thread.updated_at,
|
||||
ThreadContextEntry::Thread {
|
||||
id: thread.id.clone(),
|
||||
title: thread.summary.clone(),
|
||||
},
|
||||
)
|
||||
});
|
||||
let threads = thread_store
|
||||
.read(cx)
|
||||
.reverse_chronological_threads()
|
||||
.map(|thread| {
|
||||
(
|
||||
thread.updated_at,
|
||||
ThreadContextEntry::Thread {
|
||||
id: thread.id.clone(),
|
||||
title: thread.summary.clone(),
|
||||
},
|
||||
)
|
||||
});
|
||||
|
||||
let text_threads = text_thread_store
|
||||
.read(cx)
|
||||
@@ -300,7 +303,7 @@ pub fn unordered_thread_entries(
|
||||
context.mtime.to_utc(),
|
||||
ThreadContextEntry::Context {
|
||||
path: context.path.clone(),
|
||||
title: context.title.clone().into(),
|
||||
title: context.title.clone(),
|
||||
},
|
||||
)
|
||||
});
|
||||
|
||||
@@ -51,6 +51,10 @@ impl Tool for ContextServerTool {
|
||||
true
|
||||
}
|
||||
|
||||
fn may_perform_edits(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
|
||||
let mut schema = self.tool.input_schema.clone();
|
||||
assistant_tool::adapt_schema_to_format(&mut schema, format)?;
|
||||
@@ -100,7 +104,15 @@ impl Tool for ContextServerTool {
|
||||
tool_name,
|
||||
arguments
|
||||
);
|
||||
let response = protocol.run_tool(tool_name, arguments).await?;
|
||||
let response = protocol
|
||||
.request::<context_server::types::requests::CallTool>(
|
||||
context_server::types::CallToolParams {
|
||||
name: tool_name,
|
||||
arguments,
|
||||
meta: None,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut result = String::new();
|
||||
for content in response.content {
|
||||
@@ -111,6 +123,9 @@ impl Tool for ContextServerTool {
|
||||
types::ToolResponseContent::Image { .. } => {
|
||||
log::warn!("Ignoring image content from tool response");
|
||||
}
|
||||
types::ToolResponseContent::Audio { .. } => {
|
||||
log::warn!("Ignoring audio content from tool response");
|
||||
}
|
||||
types::ToolResponseContent::Resource { .. } => {
|
||||
log::warn!("Ignoring resource content from tool response");
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ use assistant_context_editor::AssistantContext;
|
||||
use collections::{HashSet, IndexSet};
|
||||
use futures::{self, FutureExt};
|
||||
use gpui::{App, Context, Entity, EventEmitter, Image, SharedString, Task, WeakEntity};
|
||||
use language::Buffer;
|
||||
use language::{Buffer, File as _};
|
||||
use language_model::LanguageModelImage;
|
||||
use project::image_store::is_image_file;
|
||||
use project::{Project, ProjectItem, ProjectPath, Symbol};
|
||||
@@ -304,11 +304,13 @@ impl ContextStore {
|
||||
project.open_image(project_path.clone(), cx)
|
||||
})?;
|
||||
let image_item = open_image_task.await?;
|
||||
let image = image_item.read_with(cx, |image_item, _| image_item.image.clone())?;
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
let item = image_item.read(cx);
|
||||
this.insert_image(
|
||||
Some(image_item.read(cx).project_path(cx)),
|
||||
image,
|
||||
Some(item.project_path(cx)),
|
||||
Some(item.file.full_path(cx).into()),
|
||||
item.image.clone(),
|
||||
remove_if_exists,
|
||||
cx,
|
||||
)
|
||||
@@ -317,12 +319,13 @@ impl ContextStore {
|
||||
}
|
||||
|
||||
pub fn add_image_instance(&mut self, image: Arc<Image>, cx: &mut Context<ContextStore>) {
|
||||
self.insert_image(None, image, false, cx);
|
||||
self.insert_image(None, None, image, false, cx);
|
||||
}
|
||||
|
||||
fn insert_image(
|
||||
&mut self,
|
||||
project_path: Option<ProjectPath>,
|
||||
full_path: Option<Arc<Path>>,
|
||||
image: Arc<Image>,
|
||||
remove_if_exists: bool,
|
||||
cx: &mut Context<ContextStore>,
|
||||
@@ -330,6 +333,7 @@ impl ContextStore {
|
||||
let image_task = LanguageModelImage::from_image(image.clone(), cx).shared();
|
||||
let context = AgentContextHandle::Image(ImageContext {
|
||||
project_path,
|
||||
full_path,
|
||||
original_image: image,
|
||||
image_task,
|
||||
context_id: self.next_context_id.post_inc(),
|
||||
|
||||
@@ -23,7 +23,7 @@ use crate::thread_store::{TextThreadStore, ThreadStore};
|
||||
use crate::ui::{AddedContext, ContextPill};
|
||||
use crate::{
|
||||
AcceptSuggestedContext, AgentPanel, FocusDown, FocusLeft, FocusRight, FocusUp,
|
||||
RemoveAllContext, RemoveFocusedContext, ToggleContextPicker,
|
||||
ModelUsageContext, RemoveAllContext, RemoveFocusedContext, ToggleContextPicker,
|
||||
};
|
||||
|
||||
pub struct ContextStrip {
|
||||
@@ -37,6 +37,7 @@ pub struct ContextStrip {
|
||||
_subscriptions: Vec<Subscription>,
|
||||
focused_index: Option<usize>,
|
||||
children_bounds: Option<Vec<Bounds<Pixels>>>,
|
||||
model_usage_context: ModelUsageContext,
|
||||
}
|
||||
|
||||
impl ContextStrip {
|
||||
@@ -47,6 +48,7 @@ impl ContextStrip {
|
||||
text_thread_store: Option<WeakEntity<TextThreadStore>>,
|
||||
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
suggest_context_kind: SuggestContextKind,
|
||||
model_usage_context: ModelUsageContext,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -81,6 +83,7 @@ impl ContextStrip {
|
||||
_subscriptions: subscriptions,
|
||||
focused_index: None,
|
||||
children_bounds: None,
|
||||
model_usage_context,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -98,11 +101,20 @@ impl ContextStrip {
|
||||
.as_ref()
|
||||
.and_then(|thread_store| thread_store.upgrade())
|
||||
.and_then(|thread_store| thread_store.read(cx).prompt_store().as_ref());
|
||||
|
||||
let current_model = self.model_usage_context.language_model(cx);
|
||||
|
||||
self.context_store
|
||||
.read(cx)
|
||||
.context()
|
||||
.flat_map(|context| {
|
||||
AddedContext::new_pending(context.clone(), prompt_store, project, cx)
|
||||
AddedContext::new_pending(
|
||||
context.clone(),
|
||||
prompt_store,
|
||||
project,
|
||||
current_model.as_ref(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
} else {
|
||||
|
||||
@@ -1,18 +1,17 @@
use std::{collections::VecDeque, path::Path, sync::Arc};

use anyhow::Context as _;
use assistant_context_editor::{AssistantContext, SavedContextMetadata};
use anyhow::{Context as _, Result};
use assistant_context_editor::SavedContextMetadata;
use chrono::{DateTime, Utc};
use futures::future::{TryFutureExt as _, join_all};
use gpui::{Entity, Task, prelude::*};
use gpui::{AsyncApp, Entity, SharedString, Task, prelude::*};
use itertools::Itertools;
use paths::contexts_dir;
use serde::{Deserialize, Serialize};
use smol::future::FutureExt;
use std::time::Duration;
use ui::{App, SharedString, Window};
use ui::App;
use util::ResultExt as _;

use crate::{
    Thread,
    thread::ThreadId,
    thread_store::{SerializedThreadMetadata, ThreadStore},
};

@@ -41,52 +40,34 @@ impl HistoryEntry {
|
||||
HistoryEntry::Context(context) => HistoryEntryId::Context(context.path.clone()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn title(&self) -> &SharedString {
|
||||
match self {
|
||||
HistoryEntry::Thread(thread) => &thread.summary,
|
||||
HistoryEntry::Context(context) => &context.title,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Generic identifier for a history entry.
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub enum HistoryEntryId {
|
||||
Thread(ThreadId),
|
||||
Context(Arc<Path>),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) enum RecentEntry {
|
||||
Thread(ThreadId, Entity<Thread>),
|
||||
Context(Entity<AssistantContext>),
|
||||
}
|
||||
|
||||
impl PartialEq for RecentEntry {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
match (self, other) {
|
||||
(Self::Thread(l0, _), Self::Thread(r0, _)) => l0 == r0,
|
||||
(Self::Context(l0), Self::Context(r0)) => l0 == r0,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for RecentEntry {}
|
||||
|
||||
impl RecentEntry {
|
||||
pub(crate) fn summary(&self, cx: &App) -> SharedString {
|
||||
match self {
|
||||
RecentEntry::Thread(_, thread) => thread.read(cx).summary().or_default(),
|
||||
RecentEntry::Context(context) => context.read(cx).summary().or_default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
enum SerializedRecentEntry {
|
||||
enum SerializedRecentOpen {
|
||||
Thread(String),
|
||||
ContextName(String),
|
||||
/// Old format which stores the full path
|
||||
Context(String),
|
||||
}
|
||||
|
||||
pub struct HistoryStore {
|
||||
thread_store: Entity<ThreadStore>,
|
||||
context_store: Entity<assistant_context_editor::ContextStore>,
|
||||
recently_opened_entries: VecDeque<RecentEntry>,
|
||||
recently_opened_entries: VecDeque<HistoryEntryId>,
|
||||
_subscriptions: Vec<gpui::Subscription>,
|
||||
_save_recently_opened_entries_task: Task<()>,
|
||||
}
|
||||
@@ -95,8 +76,7 @@ impl HistoryStore {
|
||||
pub fn new(
|
||||
thread_store: Entity<ThreadStore>,
|
||||
context_store: Entity<assistant_context_editor::ContextStore>,
|
||||
initial_recent_entries: impl IntoIterator<Item = RecentEntry>,
|
||||
window: &mut Window,
|
||||
initial_recent_entries: impl IntoIterator<Item = HistoryEntryId>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let subscriptions = vec![
|
||||
@@ -104,68 +84,20 @@ impl HistoryStore {
|
||||
cx.observe(&context_store, |_, _, cx| cx.notify()),
|
||||
];
|
||||
|
||||
window
|
||||
.spawn(cx, {
|
||||
let thread_store = thread_store.downgrade();
|
||||
let context_store = context_store.downgrade();
|
||||
let this = cx.weak_entity();
|
||||
async move |cx| {
|
||||
let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH);
|
||||
let contents = cx
|
||||
.background_spawn(async move { std::fs::read_to_string(path) })
|
||||
.await
|
||||
.ok()?;
|
||||
let entries = serde_json::from_str::<Vec<SerializedRecentEntry>>(&contents)
|
||||
.context("deserializing persisted agent panel navigation history")
|
||||
.log_err()?
|
||||
.into_iter()
|
||||
.take(MAX_RECENTLY_OPENED_ENTRIES)
|
||||
.map(|serialized| match serialized {
|
||||
SerializedRecentEntry::Thread(id) => thread_store
|
||||
.update_in(cx, |thread_store, window, cx| {
|
||||
let thread_id = ThreadId::from(id.as_str());
|
||||
thread_store
|
||||
.open_thread(&thread_id, window, cx)
|
||||
.map_ok(|thread| RecentEntry::Thread(thread_id, thread))
|
||||
.boxed()
|
||||
})
|
||||
.unwrap_or_else(|_| {
|
||||
async {
|
||||
anyhow::bail!("no thread store");
|
||||
}
|
||||
.boxed()
|
||||
}),
|
||||
SerializedRecentEntry::Context(id) => context_store
|
||||
.update(cx, |context_store, cx| {
|
||||
context_store
|
||||
.open_local_context(Path::new(&id).into(), cx)
|
||||
.map_ok(RecentEntry::Context)
|
||||
.boxed()
|
||||
})
|
||||
.unwrap_or_else(|_| {
|
||||
async {
|
||||
anyhow::bail!("no context store");
|
||||
}
|
||||
.boxed()
|
||||
}),
|
||||
});
|
||||
let entries = join_all(entries)
|
||||
.await
|
||||
.into_iter()
|
||||
.filter_map(|result| result.log_err())
|
||||
.collect::<VecDeque<_>>();
|
||||
|
||||
this.update(cx, |this, _| {
|
||||
this.recently_opened_entries.extend(entries);
|
||||
this.recently_opened_entries
|
||||
.truncate(MAX_RECENTLY_OPENED_ENTRIES);
|
||||
})
|
||||
.ok();
|
||||
|
||||
Some(())
|
||||
}
|
||||
cx.spawn(async move |this, cx| {
|
||||
let entries = Self::load_recently_opened_entries(cx).await.log_err()?;
|
||||
this.update(cx, |this, _| {
|
||||
this.recently_opened_entries
|
||||
.extend(
|
||||
entries.into_iter().take(
|
||||
MAX_RECENTLY_OPENED_ENTRIES
|
||||
.saturating_sub(this.recently_opened_entries.len()),
|
||||
),
|
||||
);
|
||||
})
|
||||
.detach();
|
||||
.ok()
|
||||
})
|
||||
.detach();
|
||||
|
||||
Self {
|
||||
thread_store,
|
||||
@@ -184,19 +116,20 @@ impl HistoryStore {
|
||||
return history_entries;
|
||||
}
|
||||
|
||||
for thread in self
|
||||
.thread_store
|
||||
.update(cx, |this, _cx| this.reverse_chronological_threads())
|
||||
{
|
||||
history_entries.push(HistoryEntry::Thread(thread));
|
||||
}
|
||||
|
||||
for context in self
|
||||
.context_store
|
||||
.update(cx, |this, _cx| this.reverse_chronological_contexts())
|
||||
{
|
||||
history_entries.push(HistoryEntry::Context(context));
|
||||
}
|
||||
history_entries.extend(
|
||||
self.thread_store
|
||||
.read(cx)
|
||||
.reverse_chronological_threads()
|
||||
.cloned()
|
||||
.map(HistoryEntry::Thread),
|
||||
);
|
||||
history_entries.extend(
|
||||
self.context_store
|
||||
.read(cx)
|
||||
.unordered_contexts()
|
||||
.cloned()
|
||||
.map(HistoryEntry::Context),
|
||||
);
|
||||
|
||||
history_entries.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.updated_at()));
|
||||
history_entries
|
||||
@@ -206,15 +139,62 @@ impl HistoryStore {
|
||||
self.entries(cx).into_iter().take(limit).collect()
|
||||
}
|
||||
|
||||
pub fn recently_opened_entries(&self, cx: &App) -> Vec<HistoryEntry> {
|
||||
#[cfg(debug_assertions)]
|
||||
if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
let thread_entries = self
|
||||
.thread_store
|
||||
.read(cx)
|
||||
.reverse_chronological_threads()
|
||||
.flat_map(|thread| {
|
||||
self.recently_opened_entries
|
||||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(index, entry)| match entry {
|
||||
HistoryEntryId::Thread(id) if &thread.id == id => {
|
||||
Some((index, HistoryEntry::Thread(thread.clone())))
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
});
|
||||
|
||||
let context_entries =
|
||||
self.context_store
|
||||
.read(cx)
|
||||
.unordered_contexts()
|
||||
.flat_map(|context| {
|
||||
self.recently_opened_entries
|
||||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(index, entry)| match entry {
|
||||
HistoryEntryId::Context(path) if &context.path == path => {
|
||||
Some((index, HistoryEntry::Context(context.clone())))
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
});
|
||||
|
||||
thread_entries
|
||||
.chain(context_entries)
|
||||
// optimization to halt iteration early
|
||||
.take(self.recently_opened_entries.len())
|
||||
.sorted_unstable_by_key(|(index, _)| *index)
|
||||
.map(|(_, entry)| entry)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn save_recently_opened_entries(&mut self, cx: &mut Context<Self>) {
|
||||
let serialized_entries = self
|
||||
.recently_opened_entries
|
||||
.iter()
|
||||
.filter_map(|entry| match entry {
|
||||
RecentEntry::Context(context) => Some(SerializedRecentEntry::Context(
|
||||
context.read(cx).path()?.to_str()?.to_owned(),
|
||||
)),
|
||||
RecentEntry::Thread(id, _) => Some(SerializedRecentEntry::Thread(id.to_string())),
|
||||
HistoryEntryId::Context(path) => path.file_name().map(|file| {
|
||||
SerializedRecentOpen::ContextName(file.to_string_lossy().to_string())
|
||||
}),
|
||||
HistoryEntryId::Thread(id) => Some(SerializedRecentOpen::Thread(id.to_string())),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
@@ -233,7 +213,33 @@ impl HistoryStore {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn push_recently_opened_entry(&mut self, entry: RecentEntry, cx: &mut Context<Self>) {
|
||||
fn load_recently_opened_entries(cx: &AsyncApp) -> Task<Result<Vec<HistoryEntryId>>> {
|
||||
cx.background_spawn(async move {
|
||||
let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH);
|
||||
let contents = smol::fs::read_to_string(path).await?;
|
||||
let entries = serde_json::from_str::<Vec<SerializedRecentOpen>>(&contents)
|
||||
.context("deserializing persisted agent panel navigation history")?
|
||||
.into_iter()
|
||||
.take(MAX_RECENTLY_OPENED_ENTRIES)
|
||||
.flat_map(|entry| match entry {
|
||||
SerializedRecentOpen::Thread(id) => {
|
||||
Some(HistoryEntryId::Thread(id.as_str().into()))
|
||||
}
|
||||
SerializedRecentOpen::ContextName(file_name) => Some(HistoryEntryId::Context(
|
||||
contexts_dir().join(file_name).into(),
|
||||
)),
|
||||
SerializedRecentOpen::Context(path) => {
|
||||
Path::new(&path).file_name().map(|file_name| {
|
||||
HistoryEntryId::Context(contexts_dir().join(file_name).into())
|
||||
})
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
Ok(entries)
|
||||
})
|
||||
}
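
For reference, a hypothetical round-trip of the persisted navigation history, assuming serde's default externally tagged representation for SerializedRecentOpen (no serde attributes are added in this diff); the thread id and file name are made-up values:

let entries = vec![
    SerializedRecentOpen::Thread("some-thread-id".into()),
    SerializedRecentOpen::ContextName("context-1.json".into()),
];
let json = serde_json::to_string(&entries)?;
// json is roughly: [{"Thread":"some-thread-id"},{"ContextName":"context-1.json"}]
let restored: Vec<SerializedRecentOpen> = serde_json::from_str(&json)?;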
pub fn push_recently_opened_entry(&mut self, entry: HistoryEntryId, cx: &mut Context<Self>) {
|
||||
self.recently_opened_entries
|
||||
.retain(|old_entry| old_entry != &entry);
|
||||
self.recently_opened_entries.push_front(entry);
|
||||
@@ -244,24 +250,33 @@ impl HistoryStore {
|
||||
|
||||
pub fn remove_recently_opened_thread(&mut self, id: ThreadId, cx: &mut Context<Self>) {
|
||||
self.recently_opened_entries.retain(|entry| match entry {
|
||||
RecentEntry::Thread(thread_id, _) if thread_id == &id => false,
|
||||
HistoryEntryId::Thread(thread_id) if thread_id == &id => false,
|
||||
_ => true,
|
||||
});
|
||||
self.save_recently_opened_entries(cx);
|
||||
}
|
||||
|
||||
pub fn remove_recently_opened_entry(&mut self, entry: &RecentEntry, cx: &mut Context<Self>) {
|
||||
pub fn replace_recently_opened_text_thread(
|
||||
&mut self,
|
||||
old_path: &Path,
|
||||
new_path: &Arc<Path>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
for entry in &mut self.recently_opened_entries {
|
||||
match entry {
|
||||
HistoryEntryId::Context(path) if path.as_ref() == old_path => {
|
||||
*entry = HistoryEntryId::Context(new_path.clone());
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
self.save_recently_opened_entries(cx);
|
||||
}
|
||||
|
||||
pub fn remove_recently_opened_entry(&mut self, entry: &HistoryEntryId, cx: &mut Context<Self>) {
|
||||
self.recently_opened_entries
|
||||
.retain(|old_entry| old_entry != entry);
|
||||
self.save_recently_opened_entries(cx);
|
||||
}
|
||||
|
||||
pub fn recently_opened_entries(&self, _cx: &mut Context<Self>) -> VecDeque<RecentEntry> {
|
||||
#[cfg(debug_assertions)]
|
||||
if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() {
|
||||
return VecDeque::new();
|
||||
}
|
||||
|
||||
self.recently_opened_entries.clone()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1011,7 +1011,7 @@ impl InlineAssistant {
|
||||
self.update_editor_highlights(&editor, cx);
|
||||
}
|
||||
} else {
|
||||
entry.get().highlight_updates.send(()).ok();
|
||||
entry.get_mut().highlight_updates.send(()).ok();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1331,7 +1331,7 @@ impl InlineAssistant {
|
||||
editor.clear_gutter_highlights::<GutterPendingRange>(cx);
|
||||
} else {
|
||||
editor.highlight_gutter::<GutterPendingRange>(
|
||||
&gutter_pending_ranges,
|
||||
gutter_pending_ranges,
|
||||
|cx| cx.theme().status().info_background,
|
||||
cx,
|
||||
)
|
||||
@@ -1342,7 +1342,7 @@ impl InlineAssistant {
|
||||
editor.clear_gutter_highlights::<GutterTransformedRange>(cx);
|
||||
} else {
|
||||
editor.highlight_gutter::<GutterTransformedRange>(
|
||||
&gutter_transformed_ranges,
|
||||
gutter_transformed_ranges,
|
||||
|cx| cx.theme().status().info,
|
||||
cx,
|
||||
)
|
||||
@@ -1519,7 +1519,7 @@ impl InlineAssistant {
|
||||
struct EditorInlineAssists {
|
||||
assist_ids: Vec<InlineAssistId>,
|
||||
scroll_lock: Option<InlineAssistScrollLock>,
|
||||
highlight_updates: async_watch::Sender<()>,
|
||||
highlight_updates: watch::Sender<()>,
|
||||
_update_highlights: Task<Result<()>>,
|
||||
_subscriptions: Vec<gpui::Subscription>,
|
||||
}
|
||||
@@ -1531,7 +1531,7 @@ struct InlineAssistScrollLock {
|
||||
|
||||
impl EditorInlineAssists {
|
||||
fn new(editor: &Entity<Editor>, window: &mut Window, cx: &mut App) -> Self {
|
||||
let (highlight_updates_tx, mut highlight_updates_rx) = async_watch::channel(());
|
||||
let (highlight_updates_tx, mut highlight_updates_rx) = watch::channel(());
|
||||
Self {
|
||||
assist_ids: Vec::new(),
|
||||
scroll_lock: None,
|
||||
@@ -1689,7 +1689,7 @@ impl InlineAssist {
|
||||
if let Some(editor) = editor.upgrade() {
|
||||
InlineAssistant::update_global(cx, |this, cx| {
|
||||
if let Some(editor_assists) =
|
||||
this.assists_by_editor.get(&editor.downgrade())
|
||||
this.assists_by_editor.get_mut(&editor.downgrade())
|
||||
{
|
||||
editor_assists.highlight_updates.send(()).ok();
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::agent_model_selector::{AgentModelSelector, ModelType};
|
||||
use crate::agent_model_selector::AgentModelSelector;
|
||||
use crate::buffer_codegen::BufferCodegen;
|
||||
use crate::context::ContextCreasesAddon;
|
||||
use crate::context_picker::{ContextPicker, ContextPickerCompletionProvider};
|
||||
@@ -7,12 +7,13 @@ use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
|
||||
use crate::message_editor::{extract_message_creases, insert_message_creases};
|
||||
use crate::terminal_codegen::TerminalCodegen;
|
||||
use crate::thread_store::{TextThreadStore, ThreadStore};
|
||||
use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist};
|
||||
use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext};
|
||||
use crate::{RemoveAllContext, ToggleContextPicker};
|
||||
use assistant_context_editor::language_model_selector::ToggleModelSelector;
|
||||
use client::ErrorExt;
|
||||
use collections::VecDeque;
|
||||
use db::kvp::Dismissable;
|
||||
use editor::actions::Paste;
|
||||
use editor::display_map::EditorMargins;
|
||||
use editor::{
|
||||
ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, MultiBuffer,
|
||||
@@ -99,6 +100,7 @@ impl<T: 'static> Render for PromptEditor<T> {
|
||||
|
||||
v_flex()
|
||||
.key_context("PromptEditor")
|
||||
.capture_action(cx.listener(Self::paste))
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.block_mouse_except_scroll()
|
||||
.gap_0p5()
|
||||
@@ -303,6 +305,10 @@ impl<T: 'static> PromptEditor<T> {
|
||||
self.editor.read(cx).text(cx)
|
||||
}
|
||||
|
||||
fn paste(&mut self, _: &Paste, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
crate::active_thread::attach_pasted_images_as_context(&self.context_store, cx);
|
||||
}
|
||||
|
||||
fn toggle_rate_limit_notice(
|
||||
&mut self,
|
||||
_: &ClickEvent,
|
||||
@@ -912,6 +918,7 @@ impl PromptEditor<BufferCodegen> {
|
||||
text_thread_store.clone(),
|
||||
context_picker_menu_handle.clone(),
|
||||
SuggestContextKind::Thread,
|
||||
ModelUsageContext::InlineAssistant,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -930,7 +937,7 @@ impl PromptEditor<BufferCodegen> {
|
||||
fs,
|
||||
model_selector_menu_handle,
|
||||
prompt_editor.focus_handle(cx),
|
||||
ModelType::InlineAssistant,
|
||||
ModelUsageContext::InlineAssistant,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -1083,6 +1090,7 @@ impl PromptEditor<TerminalCodegen> {
|
||||
text_thread_store.clone(),
|
||||
context_picker_menu_handle.clone(),
|
||||
SuggestContextKind::Thread,
|
||||
ModelUsageContext::InlineAssistant,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -1101,7 +1109,7 @@ impl PromptEditor<TerminalCodegen> {
|
||||
fs,
|
||||
model_selector_menu_handle.clone(),
|
||||
prompt_editor.focus_handle(cx),
|
||||
ModelType::InlineAssistant,
|
||||
ModelUsageContext::InlineAssistant,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
|
||||
@@ -2,11 +2,11 @@ use std::collections::BTreeMap;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::agent_model_selector::{AgentModelSelector, ModelType};
|
||||
use crate::agent_model_selector::AgentModelSelector;
|
||||
use crate::context::{AgentContextKey, ContextCreasesAddon, ContextLoadResult, load_context};
|
||||
use crate::tool_compatibility::{IncompatibleToolsState, IncompatibleToolsTooltip};
|
||||
use crate::ui::{
|
||||
AnimatedLabel, MaxModeTooltip,
|
||||
MaxModeTooltip,
|
||||
preview::{AgentPreview, UsageCallout},
|
||||
};
|
||||
use agent_settings::{AgentSettings, CompletionMode};
|
||||
@@ -24,10 +24,10 @@ use fs::Fs;
|
||||
use futures::future::Shared;
|
||||
use futures::{FutureExt as _, future};
|
||||
use gpui::{
|
||||
Animation, AnimationExt, App, ClipboardEntry, Entity, EventEmitter, Focusable, Subscription,
|
||||
Task, TextStyle, WeakEntity, linear_color_stop, linear_gradient, point, pulsating_between,
|
||||
Animation, AnimationExt, App, Entity, EventEmitter, Focusable, Subscription, Task, TextStyle,
|
||||
WeakEntity, linear_color_stop, linear_gradient, point, pulsating_between,
|
||||
};
|
||||
use language::{Buffer, Language};
|
||||
use language::{Buffer, Language, Point};
|
||||
use language_model::{
|
||||
ConfiguredModel, LanguageModelRequestMessage, MessageContent, RequestUsage,
|
||||
ZED_CLOUD_PROVIDER_ID,
|
||||
@@ -51,9 +51,9 @@ use crate::profile_selector::ProfileSelector;
|
||||
use crate::thread::{MessageCrease, Thread, TokenUsageRatio};
|
||||
use crate::thread_store::{TextThreadStore, ThreadStore};
|
||||
use crate::{
|
||||
ActiveThread, AgentDiffPane, Chat, ChatWithFollow, ExpandMessageEditor, Follow, NewThread,
|
||||
OpenAgentDiff, RemoveAllContext, ToggleBurnMode, ToggleContextPicker, ToggleProfileSelector,
|
||||
register_agent_preview,
|
||||
ActiveThread, AgentDiffPane, Chat, ChatWithFollow, ExpandMessageEditor, Follow, KeepAll,
|
||||
ModelUsageContext, NewThread, OpenAgentDiff, RejectAll, RemoveAllContext, ToggleBurnMode,
|
||||
ToggleContextPicker, ToggleProfileSelector, register_agent_preview,
|
||||
};
|
||||
|
||||
#[derive(RegisterComponent)]
|
||||
@@ -112,6 +112,7 @@ pub(crate) fn create_editor(
|
||||
editor.set_placeholder_text("Message the agent – @ to include context", cx);
|
||||
editor.set_show_indent_guides(false, cx);
|
||||
editor.set_soft_wrap();
|
||||
editor.set_use_modal_editing(true);
|
||||
editor.set_context_menu_options(ContextMenuOptions {
|
||||
min_entries_visible: 12,
|
||||
max_entries_visible: 12,
|
||||
@@ -168,13 +169,13 @@ impl MessageEditor {
|
||||
Some(text_thread_store.clone()),
|
||||
context_picker_menu_handle.clone(),
|
||||
SuggestContextKind::File,
|
||||
ModelUsageContext::Thread(thread.clone()),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let incompatible_tools =
|
||||
cx.new(|cx| IncompatibleToolsState::new(thread.read(cx).tools().clone(), cx));
|
||||
let incompatible_tools = cx.new(|cx| IncompatibleToolsState::new(thread.clone(), cx));
|
||||
|
||||
let subscriptions = vec![
|
||||
cx.subscribe_in(&context_strip, window, Self::handle_context_strip_event),
|
||||
@@ -196,21 +197,14 @@ impl MessageEditor {
|
||||
fs.clone(),
|
||||
model_selector_menu_handle,
|
||||
editor.focus_handle(cx),
|
||||
ModelType::Default(thread.clone()),
|
||||
ModelUsageContext::Thread(thread.clone()),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let profile_selector = cx.new(|cx| {
|
||||
ProfileSelector::new(
|
||||
fs,
|
||||
thread.clone(),
|
||||
thread_store,
|
||||
editor.focus_handle(cx),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let profile_selector =
|
||||
cx.new(|cx| ProfileSelector::new(fs, thread.clone(), editor.focus_handle(cx), cx));
|
||||
|
||||
Self {
|
||||
editor: editor.clone(),
|
||||
@@ -430,39 +424,24 @@ impl MessageEditor {
|
||||
}
|
||||
|
||||
fn paste(&mut self, _: &Paste, _: &mut Window, cx: &mut Context<Self>) {
|
||||
let images = cx
|
||||
.read_from_clipboard()
|
||||
.map(|item| {
|
||||
item.into_entries()
|
||||
.filter_map(|entry| {
|
||||
if let ClipboardEntry::Image(image) = entry {
|
||||
Some(image)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
if images.is_empty() {
|
||||
return;
|
||||
}
|
||||
cx.stop_propagation();
|
||||
|
||||
self.context_store.update(cx, |store, cx| {
|
||||
for image in images {
|
||||
store.add_image_instance(Arc::new(image), cx);
|
||||
}
|
||||
});
|
||||
crate::active_thread::attach_pasted_images_as_context(&self.context_store, cx);
|
||||
}
|
||||
|
||||
fn handle_review_click(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.thread.read(cx).has_pending_edit_tool_uses() {
|
||||
return;
|
||||
}
|
||||
|
||||
self.edits_expanded = true;
|
||||
AgentDiffPane::deploy(self.thread.clone(), self.workspace.clone(), window, cx).log_err();
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn handle_edit_bar_expand(&mut self, cx: &mut Context<Self>) {
|
||||
self.edits_expanded = !self.edits_expanded;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn handle_file_click(
|
||||
&self,
|
||||
buffer: Entity<Buffer>,
|
||||
@@ -493,6 +472,40 @@ impl MessageEditor {
|
||||
});
|
||||
}
|
||||
|
||||
fn handle_accept_all(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.thread.read(cx).has_pending_edit_tool_uses() {
|
||||
return;
|
||||
}
|
||||
|
||||
self.thread.update(cx, |thread, cx| {
|
||||
thread.keep_all_edits(cx);
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn handle_reject_all(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
if self.thread.read(cx).has_pending_edit_tool_uses() {
return;
}

// Since there's no reject_all_edits method in the thread API,
// we need to iterate through all buffers and reject their edits
let action_log = self.thread.read(cx).action_log().clone();
let changed_buffers = action_log.read(cx).changed_buffers(cx);

for (buffer, _) in changed_buffers {
self.thread.update(cx, |thread, cx| {
let buffer_snapshot = buffer.read(cx);
let start = buffer_snapshot.anchor_before(Point::new(0, 0));
let end = buffer_snapshot.anchor_after(buffer_snapshot.max_point());
thread
.reject_edits_in_ranges(buffer, vec![start..end], cx)
.detach();
});
}
cx.notify();
}
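A hedged aside contrasting the two paths above (only APIs that appear in this diff are used): accepting is a single call on the thread, while rejecting has to walk the changed buffers, as the comment in handle_reject_all explains.

    // Hedged sketch, not part of the diff.
    // Accept: one call on the thread.
    thread.keep_all_edits(cx);
    // Reject: per buffer, over a full-buffer anchor range (see handle_reject_all above).
    let snapshot = buffer.read(cx);
    let range = snapshot.anchor_before(Point::new(0, 0))..snapshot.anchor_after(snapshot.max_point());
    thread.reject_edits_in_ranges(buffer, vec![range], cx).detach();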
|
||||
|
||||
fn render_max_mode_toggle(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
|
||||
let thread = self.thread.read(cx);
|
||||
let model = thread.configured_model();
|
||||
@@ -614,6 +627,12 @@ impl MessageEditor {
|
||||
.on_action(cx.listener(Self::move_up))
|
||||
.on_action(cx.listener(Self::expand_message_editor))
|
||||
.on_action(cx.listener(Self::toggle_burn_mode))
|
||||
.on_action(
|
||||
cx.listener(|this, _: &KeepAll, window, cx| this.handle_accept_all(window, cx)),
|
||||
)
|
||||
.on_action(
|
||||
cx.listener(|this, _: &RejectAll, window, cx| this.handle_reject_all(window, cx)),
|
||||
)
|
||||
.capture_action(cx.listener(Self::paste))
|
||||
.gap_2()
|
||||
.p_2()
|
||||
@@ -869,7 +888,10 @@ impl MessageEditor {
|
||||
let bg_edit_files_disclosure = editor_bg_color.blend(active_color.opacity(0.3));
|
||||
|
||||
let is_edit_changes_expanded = self.edits_expanded;
|
||||
let is_generating = self.thread.read(cx).is_generating();
|
||||
let thread = self.thread.read(cx);
|
||||
let pending_edits = thread.has_pending_edit_tool_uses();
|
||||
|
||||
const EDIT_NOT_READY_TOOLTIP_LABEL: &str = "Wait until file edits are complete.";
|
||||
|
||||
v_flex()
|
||||
.mt_1()
|
||||
@@ -887,31 +909,28 @@ impl MessageEditor {
|
||||
}])
|
||||
.child(
|
||||
h_flex()
|
||||
.id("edits-container")
|
||||
.cursor_pointer()
|
||||
.p_1p5()
|
||||
.p_1()
|
||||
.justify_between()
|
||||
.when(is_edit_changes_expanded, |this| {
|
||||
this.border_b_1().border_color(border_color)
|
||||
})
|
||||
.on_click(
|
||||
cx.listener(|this, _, window, cx| this.handle_review_click(window, cx)),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.id("edits-container")
|
||||
.cursor_pointer()
|
||||
.w_full()
|
||||
.gap_1()
|
||||
.child(
|
||||
Disclosure::new("edits-disclosure", is_edit_changes_expanded)
|
||||
.on_click(cx.listener(|this, _ev, _window, cx| {
|
||||
this.edits_expanded = !this.edits_expanded;
|
||||
cx.notify();
|
||||
.on_click(cx.listener(|this, _, _, cx| {
|
||||
this.handle_edit_bar_expand(cx)
|
||||
})),
|
||||
)
|
||||
.map(|this| {
|
||||
if is_generating {
|
||||
if pending_edits {
|
||||
this.child(
|
||||
AnimatedLabel::new(format!(
|
||||
"Editing {} {}",
|
||||
Label::new(format!(
|
||||
"Editing {} {}…",
|
||||
changed_buffers.len(),
|
||||
if changed_buffers.len() == 1 {
|
||||
"file"
|
||||
@@ -919,7 +938,15 @@ impl MessageEditor {
|
||||
"files"
|
||||
}
|
||||
))
|
||||
.size(LabelSize::Small),
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small)
|
||||
.with_animation(
|
||||
"edit-label",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.3, 0.7)),
|
||||
|label, delta| label.alpha(delta),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
this.child(
|
||||
@@ -944,23 +971,74 @@ impl MessageEditor {
|
||||
.color(Color::Muted),
|
||||
)
|
||||
}
|
||||
}),
|
||||
})
|
||||
.on_click(
|
||||
cx.listener(|this, _, _, cx| this.handle_edit_bar_expand(cx)),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
Button::new("review", "Review Changes")
|
||||
.label_size(LabelSize::Small)
|
||||
.key_binding(
|
||||
KeyBinding::for_action_in(
|
||||
&OpenAgentDiff,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.map(|kb| kb.size(rems_from_px(12.))),
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
IconButton::new("review-changes", IconName::ListTodo)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip({
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Review Changes",
|
||||
&OpenAgentDiff,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.handle_review_click(window, cx)
|
||||
})),
|
||||
)
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.handle_review_click(window, cx)
|
||||
})),
|
||||
.child(ui::Divider::vertical().color(ui::DividerColor::Border))
|
||||
.child(
|
||||
Button::new("reject-all-changes", "Reject All")
|
||||
.label_size(LabelSize::Small)
|
||||
.disabled(pending_edits)
|
||||
.when(pending_edits, |this| {
|
||||
this.tooltip(Tooltip::text(EDIT_NOT_READY_TOOLTIP_LABEL))
|
||||
})
|
||||
.key_binding(
|
||||
KeyBinding::for_action_in(
|
||||
&RejectAll,
|
||||
&focus_handle.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.map(|kb| kb.size(rems_from_px(10.))),
|
||||
)
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.handle_reject_all(window, cx)
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
Button::new("accept-all-changes", "Accept All")
|
||||
.label_size(LabelSize::Small)
|
||||
.disabled(pending_edits)
|
||||
.when(pending_edits, |this| {
|
||||
this.tooltip(Tooltip::text(EDIT_NOT_READY_TOOLTIP_LABEL))
|
||||
})
|
||||
.key_binding(
|
||||
KeyBinding::for_action_in(
|
||||
&KeepAll,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.map(|kb| kb.size(rems_from_px(10.))),
|
||||
)
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.handle_accept_all(window, cx)
|
||||
})),
|
||||
),
|
||||
),
|
||||
)
|
||||
.when(is_edit_changes_expanded, |parent| {
|
||||
|
||||
@@ -1,26 +1,24 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use agent_settings::{
|
||||
AgentDockPosition, AgentProfile, AgentProfileId, AgentSettings, GroupedAgentProfiles,
|
||||
builtin_profiles,
|
||||
};
|
||||
use agent_settings::{AgentDockPosition, AgentProfileId, AgentSettings, builtin_profiles};
|
||||
use fs::Fs;
|
||||
use gpui::{Action, Empty, Entity, FocusHandle, Subscription, WeakEntity, prelude::*};
|
||||
use gpui::{Action, Empty, Entity, FocusHandle, Subscription, prelude::*};
|
||||
use language_model::LanguageModelRegistry;
|
||||
use settings::{Settings as _, SettingsStore, update_settings_file};
|
||||
use ui::{
|
||||
ContextMenu, ContextMenuEntry, DocumentationSide, PopoverMenu, PopoverMenuHandle, Tooltip,
|
||||
prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::{ManageProfiles, Thread, ThreadStore, ToggleProfileSelector};
|
||||
use crate::{
|
||||
ManageProfiles, Thread, ToggleProfileSelector,
|
||||
agent_profile::{AgentProfile, AvailableProfiles},
|
||||
};
|
||||
|
||||
pub struct ProfileSelector {
|
||||
profiles: GroupedAgentProfiles,
|
||||
profiles: AvailableProfiles,
|
||||
fs: Arc<dyn Fs>,
|
||||
thread: Entity<Thread>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
focus_handle: FocusHandle,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
@@ -30,7 +28,6 @@ impl ProfileSelector {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
thread: Entity<Thread>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
focus_handle: FocusHandle,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -39,10 +36,9 @@ impl ProfileSelector {
|
||||
});
|
||||
|
||||
Self {
|
||||
profiles: GroupedAgentProfiles::from_settings(AgentSettings::get_global(cx)),
|
||||
profiles: AgentProfile::available_profiles(cx),
|
||||
fs,
|
||||
thread,
|
||||
thread_store,
|
||||
menu_handle: PopoverMenuHandle::default(),
|
||||
focus_handle,
|
||||
_subscriptions: vec![settings_subscription],
|
||||
@@ -54,7 +50,7 @@ impl ProfileSelector {
|
||||
}
|
||||
|
||||
fn refresh_profiles(&mut self, cx: &mut Context<Self>) {
|
||||
self.profiles = GroupedAgentProfiles::from_settings(AgentSettings::get_global(cx));
|
||||
self.profiles = AgentProfile::available_profiles(cx);
|
||||
}
|
||||
|
||||
fn build_context_menu(
|
||||
@@ -64,21 +60,30 @@ impl ProfileSelector {
|
||||
) -> Entity<ContextMenu> {
|
||||
ContextMenu::build(window, cx, |mut menu, _window, cx| {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
for (profile_id, profile) in self.profiles.builtin.iter() {
|
||||
|
||||
let mut found_non_builtin = false;
|
||||
for (profile_id, profile_name) in self.profiles.iter() {
|
||||
if !builtin_profiles::is_builtin(profile_id) {
|
||||
found_non_builtin = true;
|
||||
continue;
|
||||
}
|
||||
menu = menu.item(self.menu_entry_for_profile(
|
||||
profile_id.clone(),
|
||||
profile,
|
||||
profile_name,
|
||||
settings,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
|
||||
if !self.profiles.custom.is_empty() {
|
||||
if found_non_builtin {
|
||||
menu = menu.separator().header("Custom Profiles");
|
||||
for (profile_id, profile) in self.profiles.custom.iter() {
|
||||
for (profile_id, profile_name) in self.profiles.iter() {
|
||||
if builtin_profiles::is_builtin(profile_id) {
|
||||
continue;
|
||||
}
|
||||
menu = menu.item(self.menu_entry_for_profile(
|
||||
profile_id.clone(),
|
||||
profile,
|
||||
profile_name,
|
||||
settings,
|
||||
cx,
|
||||
));
|
||||
@@ -99,19 +104,20 @@ impl ProfileSelector {
|
||||
fn menu_entry_for_profile(
|
||||
&self,
|
||||
profile_id: AgentProfileId,
|
||||
profile: &AgentProfile,
|
||||
profile_name: &SharedString,
|
||||
settings: &AgentSettings,
|
||||
_cx: &App,
|
||||
cx: &App,
|
||||
) -> ContextMenuEntry {
|
||||
let documentation = match profile.name.to_lowercase().as_str() {
|
||||
let documentation = match profile_name.to_lowercase().as_str() {
|
||||
builtin_profiles::WRITE => Some("Get help to write anything."),
|
||||
builtin_profiles::ASK => Some("Chat about your codebase."),
|
||||
builtin_profiles::MINIMAL => Some("Chat about anything with no tools."),
|
||||
_ => None,
|
||||
};
|
||||
let thread_profile_id = self.thread.read(cx).profile().id();
|
||||
|
||||
let entry = ContextMenuEntry::new(profile.name.clone())
|
||||
.toggleable(IconPosition::End, profile_id == settings.default_profile);
|
||||
let entry = ContextMenuEntry::new(profile_name.clone())
|
||||
.toggleable(IconPosition::End, &profile_id == thread_profile_id);
|
||||
|
||||
let entry = if let Some(doc_text) = documentation {
|
||||
entry.documentation_aside(documentation_side(settings.dock), move |_| {
|
||||
@@ -123,7 +129,7 @@ impl ProfileSelector {
|
||||
|
||||
entry.handler({
|
||||
let fs = self.fs.clone();
|
||||
let thread_store = self.thread_store.clone();
|
||||
let thread = self.thread.clone();
|
||||
let profile_id = profile_id.clone();
|
||||
move |_window, cx| {
|
||||
update_settings_file::<AgentSettings>(fs.clone(), cx, {
|
||||
@@ -133,11 +139,9 @@ impl ProfileSelector {
|
||||
}
|
||||
});
|
||||
|
||||
thread_store
|
||||
.update(cx, |this, cx| {
|
||||
this.load_profile_by_id(profile_id.clone(), cx);
|
||||
})
|
||||
.log_err();
|
||||
thread.update(cx, |this, cx| {
|
||||
this.set_profile(profile_id.clone(), cx);
|
||||
});
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -146,7 +150,7 @@ impl ProfileSelector {
|
||||
impl Render for ProfileSelector {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
let profile_id = &settings.default_profile;
|
||||
let profile_id = self.thread.read(cx).profile().id();
|
||||
let profile = settings.profiles.get(profile_id);
|
||||
|
||||
let selected_profile = profile
|
||||
|
||||
@@ -179,18 +179,17 @@ impl TerminalTransaction {
// Ensure that the assistant cannot accidentally execute commands that are streamed into the terminal
let input = Self::sanitize_input(hunk);
self.terminal
.update(cx, |terminal, _| terminal.input(input));
.update(cx, |terminal, _| terminal.input(input.into_bytes()));
}

pub fn undo(&self, cx: &mut App) {
self.terminal
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.to_string()));
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.as_bytes()));
}

pub fn complete(&self, cx: &mut App) {
self.terminal.update(cx, |terminal, _| {
terminal.input(CARRIAGE_RETURN.to_string())
});
self.terminal
.update(cx, |terminal, _| terminal.input(CARRIAGE_RETURN.as_bytes()));
}

fn sanitize_input(mut input: String) -> String {
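The body of sanitize_input is not shown in this hunk. A purely illustrative sketch of the idea stated in the comment above (hypothetical helper, not the actual implementation) would strip the characters that could submit a streamed command:

    // Hypothetical sketch only; the real sanitize_input body is elided above.
    fn sanitize_input_sketch(mut input: String) -> String {
        input.retain(|c| c != '\r' && c != '\n');
        input
    }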
|
||||
|
||||
@@ -106,7 +106,7 @@ impl TerminalInlineAssistant {
|
||||
});
|
||||
let prompt_editor_render = prompt_editor.clone();
|
||||
let block = terminal_view::BlockProperties {
|
||||
height: 2,
|
||||
height: 4,
|
||||
render: Box::new(move |_| prompt_editor_render.clone().into_any_element()),
|
||||
};
|
||||
terminal_view.update(cx, |terminal_view, cx| {
|
||||
@@ -202,7 +202,7 @@ impl TerminalInlineAssistant {
|
||||
.update(cx, |terminal, cx| {
|
||||
terminal
|
||||
.terminal()
|
||||
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.to_string()));
|
||||
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.as_bytes()));
|
||||
})
|
||||
.log_err();
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
|
||||
use agent_settings::{AgentSettings, CompletionMode};
|
||||
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode};
|
||||
use anyhow::{Result, anyhow};
|
||||
use assistant_tool::{ActionLog, AnyToolCard, Tool, ToolWorkingSet};
|
||||
use chrono::{DateTime, Utc};
|
||||
@@ -41,6 +41,7 @@ use uuid::Uuid;
|
||||
use zed_llm_client::{CompletionIntent, CompletionRequestStatus};
|
||||
|
||||
use crate::ThreadStore;
|
||||
use crate::agent_profile::AgentProfile;
|
||||
use crate::context::{AgentContext, AgentContextHandle, ContextLoadResult, LoadedContext};
|
||||
use crate::thread_store::{
|
||||
SerializedCrease, SerializedLanguageModel, SerializedMessage, SerializedMessageSegment,
|
||||
@@ -194,20 +195,20 @@ impl MessageSegment {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
pub struct ProjectSnapshot {
|
||||
pub worktree_snapshots: Vec<WorktreeSnapshot>,
|
||||
pub unsaved_buffer_paths: Vec<String>,
|
||||
pub timestamp: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
pub struct WorktreeSnapshot {
|
||||
pub worktree_path: String,
|
||||
pub git_state: Option<GitState>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
pub struct GitState {
|
||||
pub remote_url: Option<String>,
|
||||
pub head_sha: Option<String>,
|
||||
@@ -246,7 +247,7 @@ impl LastRestoreCheckpoint {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
|
||||
pub enum DetailedSummaryState {
|
||||
#[default]
|
||||
NotGenerated,
|
||||
@@ -360,6 +361,7 @@ pub struct Thread {
|
||||
>,
|
||||
remaining_turns: u32,
|
||||
configured_model: Option<ConfiguredModel>,
|
||||
profile: AgentProfile,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
@@ -389,7 +391,7 @@ impl ThreadSummary {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
pub struct ExceededWindowError {
|
||||
/// Model used when last message exceeded context window
|
||||
model_id: LanguageModelId,
|
||||
@@ -407,6 +409,7 @@ impl Thread {
|
||||
) -> Self {
|
||||
let (detailed_summary_tx, detailed_summary_rx) = postage::watch::channel();
|
||||
let configured_model = LanguageModelRegistry::read_global(cx).default_model();
|
||||
let profile_id = AgentSettings::get_global(cx).default_profile.clone();
|
||||
|
||||
Self {
|
||||
id: ThreadId::new(),
|
||||
@@ -449,6 +452,7 @@ impl Thread {
|
||||
request_callback: None,
|
||||
remaining_turns: u32::MAX,
|
||||
configured_model,
|
||||
profile: AgentProfile::new(profile_id, tools),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -495,6 +499,9 @@ impl Thread {
|
||||
let completion_mode = serialized
|
||||
.completion_mode
|
||||
.unwrap_or_else(|| AgentSettings::get_global(cx).preferred_completion_mode);
|
||||
let profile_id = serialized
|
||||
.profile
|
||||
.unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone());
|
||||
|
||||
Self {
|
||||
id,
|
||||
@@ -554,7 +561,7 @@ impl Thread {
|
||||
pending_checkpoint: None,
|
||||
project: project.clone(),
|
||||
prompt_builder,
|
||||
tools,
|
||||
tools: tools.clone(),
|
||||
tool_use,
|
||||
action_log: cx.new(|_| ActionLog::new(project)),
|
||||
initial_project_snapshot: Task::ready(serialized.initial_project_snapshot).shared(),
|
||||
@@ -570,6 +577,7 @@ impl Thread {
|
||||
request_callback: None,
|
||||
remaining_turns: u32::MAX,
|
||||
configured_model,
|
||||
profile: AgentProfile::new(profile_id, tools),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -585,6 +593,17 @@ impl Thread {
&self.id
}

pub fn profile(&self) -> &AgentProfile {
&self.profile
}

pub fn set_profile(&mut self, id: AgentProfileId, cx: &mut Context<Self>) {
if &id != self.profile.id() {
self.profile = AgentProfile::new(id, self.tools.clone());
cx.emit(ThreadEvent::ProfileChanged);
}
}
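A hedged usage sketch (only calls that appear elsewhere in this diff): switching a thread's profile rebuilds the AgentProfile from the thread's tool working set and emits ThreadEvent::ProfileChanged only when the id actually changes.

    // Hedged sketch, not part of the diff: no event is emitted if the id is unchanged.
    thread.update(cx, |thread, cx| {
        thread.set_profile(AgentProfileId::default(), cx);
    });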
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.messages.is_empty()
|
||||
}
|
||||
@@ -871,7 +890,16 @@ impl Thread {
|
||||
self.tool_use
|
||||
.pending_tool_uses()
|
||||
.iter()
|
||||
.all(|tool_use| tool_use.status.is_error())
|
||||
.all(|pending_tool_use| pending_tool_use.status.is_error())
|
||||
}
|
||||
|
||||
/// Returns whether any pending tool uses may perform edits
|
||||
pub fn has_pending_edit_tool_uses(&self) -> bool {
|
||||
self.tool_use
|
||||
.pending_tool_uses()
|
||||
.iter()
|
||||
.filter(|pending_tool_use| !pending_tool_use.status.is_error())
|
||||
.any(|pending_tool_use| pending_tool_use.may_perform_edits)
|
||||
}
|
||||
|
||||
pub fn tool_uses_for_message(&self, id: MessageId, cx: &App) -> Vec<ToolUse> {
|
||||
@@ -910,8 +938,7 @@ impl Thread {
|
||||
model: Arc<dyn LanguageModel>,
|
||||
) -> Vec<LanguageModelRequestTool> {
|
||||
if model.supports_tools() {
|
||||
self.tools()
|
||||
.read(cx)
|
||||
self.profile
|
||||
.enabled_tools(cx)
|
||||
.into_iter()
|
||||
.filter_map(|tool| {
|
||||
@@ -1023,6 +1050,7 @@ impl Thread {
|
||||
id: MessageId,
|
||||
new_role: Role,
|
||||
new_segments: Vec<MessageSegment>,
|
||||
creases: Vec<MessageCrease>,
|
||||
loaded_context: Option<LoadedContext>,
|
||||
checkpoint: Option<GitStoreCheckpoint>,
|
||||
cx: &mut Context<Self>,
|
||||
@@ -1032,6 +1060,7 @@ impl Thread {
|
||||
};
|
||||
message.role = new_role;
|
||||
message.segments = new_segments;
|
||||
message.creases = creases;
|
||||
if let Some(context) = loaded_context {
|
||||
message.loaded_context = context;
|
||||
}
|
||||
@@ -1169,6 +1198,7 @@ impl Thread {
|
||||
}),
|
||||
completion_mode: Some(this.completion_mode),
|
||||
tool_use_limit_reached: this.tool_use_limit_reached,
|
||||
profile: Some(this.profile.id().clone()),
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -1533,6 +1563,9 @@ impl Thread {
|
||||
Err(LanguageModelCompletionError::Other(error)) => {
|
||||
return Err(error);
|
||||
}
|
||||
Err(err @ LanguageModelCompletionError::RateLimit(..)) => {
|
||||
return Err(err.into());
|
||||
}
|
||||
};
|
||||
|
||||
match event {
|
||||
@@ -1673,6 +1706,7 @@ impl Thread {
|
||||
}
|
||||
CompletionRequestStatus::ToolUseLimitReached => {
|
||||
thread.tool_use_limit_reached = true;
|
||||
cx.emit(ThreadEvent::ToolUseLimitReached);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2109,7 +2143,7 @@ impl Thread {
|
||||
window: Option<AnyWindowHandle>,
|
||||
cx: &mut Context<Thread>,
|
||||
) {
|
||||
let available_tools = self.tools.read(cx).enabled_tools(cx);
|
||||
let available_tools = self.profile.enabled_tools(cx);
|
||||
|
||||
let tool_list = available_tools
|
||||
.iter()
|
||||
@@ -2201,19 +2235,15 @@ impl Thread {
|
||||
) -> Task<()> {
|
||||
let tool_name: Arc<str> = tool.name().into();
|
||||
|
||||
let tool_result = if self.tools.read(cx).is_disabled(&tool.source(), &tool_name) {
|
||||
Task::ready(Err(anyhow!("tool is disabled: {tool_name}"))).into()
|
||||
} else {
|
||||
tool.run(
|
||||
input,
|
||||
request,
|
||||
self.project.clone(),
|
||||
self.action_log.clone(),
|
||||
model,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
};
|
||||
let tool_result = tool.run(
|
||||
input,
|
||||
request,
|
||||
self.project.clone(),
|
||||
self.action_log.clone(),
|
||||
model,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
// Store the card separately if it exists
|
||||
if let Some(card) = tool_result.card.clone() {
|
||||
@@ -2332,8 +2362,7 @@ impl Thread {
|
||||
let client = self.project.read(cx).client();
|
||||
|
||||
let enabled_tool_names: Vec<String> = self
|
||||
.tools()
|
||||
.read(cx)
|
||||
.profile
|
||||
.enabled_tools(cx)
|
||||
.iter()
|
||||
.map(|tool| tool.name())
|
||||
@@ -2843,8 +2872,10 @@ pub enum ThreadEvent {
|
||||
},
|
||||
CheckpointChanged,
|
||||
ToolConfirmationNeeded,
|
||||
ToolUseLimitReached,
|
||||
CancelEditing,
|
||||
CompletionCanceled,
|
||||
ProfileChanged,
|
||||
}
|
||||
|
||||
impl EventEmitter<ThreadEvent> for Thread {}
|
||||
@@ -2859,7 +2890,7 @@ struct PendingCompletion {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{ThreadStore, context::load_context, context_store::ContextStore, thread_store};
|
||||
use agent_settings::{AgentSettings, LanguageModelParameters};
|
||||
use agent_settings::{AgentProfileId, AgentSettings, LanguageModelParameters};
|
||||
use assistant_tool::ToolRegistry;
|
||||
use editor::EditorSettings;
|
||||
use gpui::TestAppContext;
|
||||
@@ -3272,6 +3303,71 @@ fn main() {{
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_storing_profile_setting_per_thread(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(
|
||||
cx,
|
||||
json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let (_workspace, thread_store, thread, _context_store, _model) =
|
||||
setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Check that we are starting with the default profile
|
||||
let profile = cx.read(|cx| thread.read(cx).profile.clone());
|
||||
let tool_set = cx.read(|cx| thread_store.read(cx).tools());
|
||||
assert_eq!(
|
||||
profile,
|
||||
AgentProfile::new(AgentProfileId::default(), tool_set)
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_serializing_thread_profile(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(
|
||||
cx,
|
||||
json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let (_workspace, thread_store, thread, _context_store, _model) =
|
||||
setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Profile gets serialized with default values
|
||||
let serialized = thread
|
||||
.update(cx, |thread, cx| thread.serialize(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(serialized.profile, Some(AgentProfileId::default()));
|
||||
|
||||
let deserialized = cx.update(|cx| {
|
||||
thread.update(cx, |thread, cx| {
|
||||
Thread::deserialize(
|
||||
thread.id.clone(),
|
||||
serialized,
|
||||
thread.project.clone(),
|
||||
thread.tools.clone(),
|
||||
thread.prompt_builder.clone(),
|
||||
thread.project_context.clone(),
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
});
|
||||
let tool_set = cx.read(|cx| thread_store.read(cx).tools());
|
||||
|
||||
assert_eq!(
|
||||
deserialized.profile,
|
||||
AgentProfile::new(AgentProfileId::default(), tool_set)
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_temperature_setting(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
@@ -671,7 +671,7 @@ impl RenderOnce for HistoryEntryElement {
|
||||
),
|
||||
HistoryEntry::Context(context) => (
|
||||
context.path.to_string_lossy().to_string(),
|
||||
context.title.clone().into(),
|
||||
context.title.clone(),
|
||||
context.mtime.timestamp(),
|
||||
),
|
||||
};
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
use std::borrow::Cow;
|
||||
use std::cell::{Ref, RefCell};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, CompletionMode};
|
||||
use agent_settings::{AgentProfileId, CompletionMode};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ToolId, ToolSource, ToolWorkingSet};
|
||||
use assistant_tool::{ToolId, ToolWorkingSet};
|
||||
use chrono::{DateTime, Utc};
|
||||
use collections::HashMap;
|
||||
use context_server::ContextServerId;
|
||||
@@ -17,8 +16,7 @@ use gpui::{
|
||||
App, BackgroundExecutor, Context, Entity, EventEmitter, Global, ReadGlobal, SharedString,
|
||||
Subscription, Task, prelude::*,
|
||||
};
|
||||
use heed::Database;
|
||||
use heed::types::SerdeBincode;
|
||||
|
||||
use language_model::{LanguageModelToolResultContent, LanguageModelToolUseId, Role, TokenUsage};
|
||||
use project::context_server_store::{ContextServerStatus, ContextServerStore};
|
||||
use project::{Project, ProjectItem, ProjectPath, Worktree};
|
||||
@@ -27,7 +25,6 @@ use prompt_store::{
|
||||
UserRulesContext, WorktreeContext,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use ui::Window;
|
||||
use util::ResultExt as _;
|
||||
|
||||
@@ -35,14 +32,52 @@ use crate::context_server_tool::ContextServerTool;
|
||||
use crate::thread::{
|
||||
DetailedSummaryState, ExceededWindowError, MessageId, ProjectSnapshot, Thread, ThreadId,
|
||||
};
|
||||
use indoc::indoc;
|
||||
use sqlez::{
|
||||
bindable::{Bind, Column},
|
||||
connection::Connection,
|
||||
statement::Statement,
|
||||
};
|
||||
|
||||
const RULES_FILE_NAMES: [&'static str; 6] = [
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum DataType {
#[serde(rename = "json")]
Json,
#[serde(rename = "zstd")]
Zstd,
}

impl Bind for DataType {
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
let value = match self {
DataType::Json => "json",
DataType::Zstd => "zstd",
};
value.bind(statement, start_index)
}
}

impl Column for DataType {
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
let (value, next_index) = String::column(statement, start_index)?;
let data_type = match value.as_str() {
"json" => DataType::Json,
"zstd" => DataType::Zstd,
_ => anyhow::bail!("Unknown data type: {}", value),
};
Ok((data_type, next_index))
}
}
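A small illustrative check (hedged; it assumes serde_json is available as a dependency, which this diff does not confirm): the #[serde(rename)] attributes keep DataType's JSON form aligned with the "json"/"zstd" strings that the Bind/Column impls store in the data_type column.

    // Hedged sketch, not part of the diff.
    assert_eq!(serde_json::to_string(&DataType::Zstd).unwrap(), "\"zstd\"");
    assert_eq!(serde_json::from_str::<DataType>("\"json\"").unwrap(), DataType::Json);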
|
||||
|
||||
const RULES_FILE_NAMES: [&'static str; 8] = [
|
||||
".rules",
|
||||
".cursorrules",
|
||||
".windsurfrules",
|
||||
".clinerules",
|
||||
".github/copilot-instructions.md",
|
||||
"CLAUDE.md",
|
||||
"AGENT.md",
|
||||
"AGENTS.md",
|
||||
];
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
@@ -54,7 +89,7 @@ pub fn init(cx: &mut App) {
|
||||
pub struct SharedProjectContext(Rc<RefCell<Option<ProjectContext>>>);
|
||||
|
||||
impl SharedProjectContext {
|
||||
pub fn borrow(&self) -> Ref<Option<ProjectContext>> {
|
||||
pub fn borrow(&self) -> Ref<'_, Option<ProjectContext>> {
|
||||
self.0.borrow()
|
||||
}
|
||||
}
|
||||
@@ -111,12 +146,7 @@ impl ThreadStore {
|
||||
prompt_store: Option<Entity<PromptStore>>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> (Self, oneshot::Receiver<()>) {
|
||||
let mut subscriptions = vec![
|
||||
cx.observe_global::<SettingsStore>(move |this: &mut Self, cx| {
|
||||
this.load_default_profile(cx);
|
||||
}),
|
||||
cx.subscribe(&project, Self::handle_project_event),
|
||||
];
|
||||
let mut subscriptions = vec![cx.subscribe(&project, Self::handle_project_event)];
|
||||
|
||||
if let Some(prompt_store) = prompt_store.as_ref() {
|
||||
subscriptions.push(cx.subscribe(
|
||||
@@ -164,7 +194,6 @@ impl ThreadStore {
|
||||
_reload_system_prompt_task: reload_system_prompt_task,
|
||||
_subscriptions: subscriptions,
|
||||
};
|
||||
this.load_default_profile(cx);
|
||||
this.register_context_server_handlers(cx);
|
||||
this.reload(cx).detach_and_log_err(cx);
|
||||
(this, ready_rx)
|
||||
@@ -364,16 +393,11 @@ impl ThreadStore {
|
||||
self.threads.len()
|
||||
}
|
||||
|
||||
pub fn unordered_threads(&self) -> impl Iterator<Item = &SerializedThreadMetadata> {
|
||||
pub fn reverse_chronological_threads(&self) -> impl Iterator<Item = &SerializedThreadMetadata> {
|
||||
// ordering is from "ORDER BY" in `list_threads`
|
||||
self.threads.iter()
|
||||
}
|
||||
|
||||
pub fn reverse_chronological_threads(&self) -> Vec<SerializedThreadMetadata> {
|
||||
let mut threads = self.threads.iter().cloned().collect::<Vec<_>>();
|
||||
threads.sort_unstable_by_key(|thread| std::cmp::Reverse(thread.updated_at));
|
||||
threads
|
||||
}
|
||||
|
||||
pub fn create_thread(&mut self, cx: &mut Context<Self>) -> Entity<Thread> {
|
||||
cx.new(|cx| {
|
||||
Thread::new(
|
||||
@@ -484,92 +508,15 @@ impl ThreadStore {
|
||||
})
|
||||
}
|
||||
|
||||
fn load_default_profile(&self, cx: &mut Context<Self>) {
|
||||
let assistant_settings = AgentSettings::get_global(cx);
|
||||
|
||||
self.load_profile_by_id(assistant_settings.default_profile.clone(), cx);
|
||||
}
|
||||
|
||||
pub fn load_profile_by_id(&self, profile_id: AgentProfileId, cx: &mut Context<Self>) {
|
||||
let assistant_settings = AgentSettings::get_global(cx);
|
||||
|
||||
if let Some(profile) = assistant_settings.profiles.get(&profile_id) {
|
||||
self.load_profile(profile.clone(), cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load_profile(&self, profile: AgentProfile, cx: &mut Context<Self>) {
|
||||
self.tools.update(cx, |tools, cx| {
|
||||
tools.disable_all_tools(cx);
|
||||
tools.enable(
|
||||
ToolSource::Native,
|
||||
&profile
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool))
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
if profile.enable_all_context_servers {
|
||||
for context_server_id in self
|
||||
.project
|
||||
.read(cx)
|
||||
.context_server_store()
|
||||
.read(cx)
|
||||
.all_server_ids()
|
||||
{
|
||||
self.tools.update(cx, |tools, cx| {
|
||||
tools.enable_source(
|
||||
ToolSource::ContextServer {
|
||||
id: context_server_id.0.into(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
// Enable all the tools from all context servers, but disable the ones that are explicitly disabled
|
||||
for (context_server_id, preset) in profile.context_servers {
|
||||
self.tools.update(cx, |tools, cx| {
|
||||
tools.disable(
|
||||
ToolSource::ContextServer {
|
||||
id: context_server_id.into(),
|
||||
},
|
||||
&preset
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| (!enabled).then(|| tool))
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
} else {
|
||||
for (context_server_id, preset) in profile.context_servers {
|
||||
self.tools.update(cx, |tools, cx| {
|
||||
tools.enable(
|
||||
ToolSource::ContextServer {
|
||||
id: context_server_id.into(),
|
||||
},
|
||||
&preset
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool))
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn register_context_server_handlers(&self, cx: &mut Context<Self>) {
|
||||
cx.subscribe(
|
||||
&self.project.read(cx).context_server_store(),
|
||||
Self::handle_context_server_event,
|
||||
)
|
||||
.detach();
|
||||
let context_server_store = self.project.read(cx).context_server_store();
|
||||
cx.subscribe(&context_server_store, Self::handle_context_server_event)
|
||||
.detach();
|
||||
|
||||
// Check for any servers that were already running before the handler was registered
|
||||
for server in context_server_store.read(cx).running_servers() {
|
||||
self.load_context_server_tools(server.id(), context_server_store.clone(), cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_context_server_event(
|
||||
@@ -582,71 +529,71 @@ impl ThreadStore {
|
||||
match event {
|
||||
project::context_server_store::Event::ServerStatusChanged { server_id, status } => {
|
||||
match status {
|
||||
ContextServerStatus::Starting => {}
|
||||
ContextServerStatus::Running => {
|
||||
if let Some(server) =
|
||||
context_server_store.read(cx).get_running_server(server_id)
|
||||
{
|
||||
let context_server_manager = context_server_store.clone();
|
||||
cx.spawn({
|
||||
let server = server.clone();
|
||||
let server_id = server_id.clone();
|
||||
async move |this, cx| {
|
||||
let Some(protocol) = server.client() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if protocol.capable(context_server::protocol::ServerCapability::Tools) {
|
||||
if let Some(tools) = protocol.list_tools().await.log_err() {
|
||||
let tool_ids = tool_working_set
|
||||
.update(cx, |tool_working_set, _| {
|
||||
tools
|
||||
.tools
|
||||
.into_iter()
|
||||
.map(|tool| {
|
||||
log::info!(
|
||||
"registering context server tool: {:?}",
|
||||
tool.name
|
||||
);
|
||||
tool_working_set.insert(Arc::new(
|
||||
ContextServerTool::new(
|
||||
context_server_manager.clone(),
|
||||
server.id(),
|
||||
tool,
|
||||
),
|
||||
))
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.log_err();
|
||||
|
||||
if let Some(tool_ids) = tool_ids {
|
||||
this.update(cx, |this, cx| {
|
||||
this.context_server_tool_ids
|
||||
.insert(server_id, tool_ids);
|
||||
this.load_default_profile(cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
self.load_context_server_tools(server_id.clone(), context_server_store, cx);
|
||||
}
|
||||
ContextServerStatus::Stopped | ContextServerStatus::Error(_) => {
|
||||
if let Some(tool_ids) = self.context_server_tool_ids.remove(server_id) {
|
||||
tool_working_set.update(cx, |tool_working_set, _| {
|
||||
tool_working_set.remove(&tool_ids);
|
||||
});
|
||||
self.load_default_profile(cx);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn load_context_server_tools(
|
||||
&self,
|
||||
server_id: ContextServerId,
|
||||
context_server_store: Entity<ContextServerStore>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(server) = context_server_store.read(cx).get_running_server(&server_id) else {
|
||||
return;
|
||||
};
|
||||
let tool_working_set = self.tools.clone();
|
||||
cx.spawn(async move |this, cx| {
|
||||
let Some(protocol) = server.client() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if protocol.capable(context_server::protocol::ServerCapability::Tools) {
|
||||
if let Some(response) = protocol
|
||||
.request::<context_server::types::requests::ListTools>(())
|
||||
.await
|
||||
.log_err()
|
||||
{
|
||||
let tool_ids = tool_working_set
|
||||
.update(cx, |tool_working_set, _| {
|
||||
response
|
||||
.tools
|
||||
.into_iter()
|
||||
.map(|tool| {
|
||||
log::info!("registering context server tool: {:?}", tool.name);
|
||||
tool_working_set.insert(Arc::new(ContextServerTool::new(
|
||||
context_server_store.clone(),
|
||||
server.id(),
|
||||
tool,
|
||||
)))
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.log_err();
|
||||
|
||||
if let Some(tool_ids) = tool_ids {
|
||||
this.update(cx, |this, _| {
|
||||
this.context_server_tool_ids.insert(server_id, tool_ids);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
@@ -656,7 +603,7 @@ pub struct SerializedThreadMetadata {
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
pub struct SerializedThread {
|
||||
pub version: String,
|
||||
pub summary: SharedString,
|
||||
@@ -678,9 +625,11 @@ pub struct SerializedThread {
|
||||
pub completion_mode: Option<CompletionMode>,
|
||||
#[serde(default)]
|
||||
pub tool_use_limit_reached: bool,
|
||||
#[serde(default)]
|
||||
pub profile: Option<AgentProfileId>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
pub struct SerializedLanguageModel {
|
||||
pub provider: String,
|
||||
pub model: String,
|
||||
@@ -741,11 +690,15 @@ impl SerializedThreadV0_1_0 {
|
||||
messages.push(message);
|
||||
}
|
||||
|
||||
SerializedThread { messages, ..self.0 }
|
||||
SerializedThread {
|
||||
messages,
|
||||
version: SerializedThread::VERSION.to_string(),
|
||||
..self.0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||
pub struct SerializedMessage {
|
||||
pub id: MessageId,
|
||||
pub role: Role,
|
||||
@@ -763,7 +716,7 @@ pub struct SerializedMessage {
|
||||
pub is_hidden: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum SerializedMessageSegment {
|
||||
#[serde(rename = "text")]
|
||||
@@ -781,14 +734,14 @@ pub enum SerializedMessageSegment {
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||
pub struct SerializedToolUse {
|
||||
pub id: LanguageModelToolUseId,
|
||||
pub name: SharedString,
|
||||
pub input: serde_json::Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||
pub struct SerializedToolResult {
|
||||
pub tool_use_id: LanguageModelToolUseId,
|
||||
pub is_error: bool,
|
||||
@@ -820,6 +773,7 @@ impl LegacySerializedThread {
|
||||
model: None,
|
||||
completion_mode: None,
|
||||
tool_use_limit_reached: false,
|
||||
profile: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -850,7 +804,7 @@ impl LegacySerializedMessage {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||
pub struct SerializedCrease {
|
||||
pub start: usize,
|
||||
pub end: usize,
|
||||
@@ -866,25 +820,27 @@ impl Global for GlobalThreadsDatabase {}
|
||||
|
||||
pub(crate) struct ThreadsDatabase {
|
||||
executor: BackgroundExecutor,
|
||||
env: heed::Env,
|
||||
threads: Database<SerdeBincode<ThreadId>, SerializedThread>,
|
||||
connection: Arc<Mutex<Connection>>,
|
||||
}
|
||||
|
||||
impl heed::BytesEncode<'_> for SerializedThread {
|
||||
type EItem = SerializedThread;
|
||||
impl ThreadsDatabase {
|
||||
fn connection(&self) -> Arc<Mutex<Connection>> {
|
||||
self.connection.clone()
|
||||
}
|
||||
|
||||
fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, heed::BoxedError> {
|
||||
serde_json::to_vec(item).map(Cow::Owned).map_err(Into::into)
|
||||
const COMPRESSION_LEVEL: i32 = 3;
|
||||
}
|
||||
|
||||
impl Bind for ThreadId {
|
||||
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
|
||||
self.to_string().bind(statement, start_index)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> heed::BytesDecode<'a> for SerializedThread {
|
||||
type DItem = SerializedThread;
|
||||
|
||||
fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, heed::BoxedError> {
|
||||
// We implement this type manually because we want to call `SerializedThread::from_json`,
|
||||
// instead of the Deserialize trait implementation for `SerializedThread`.
|
||||
SerializedThread::from_json(bytes).map_err(Into::into)
|
||||
impl Column for ThreadId {
|
||||
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
|
||||
let (id_str, next_index) = String::column(statement, start_index)?;
|
||||
Ok((ThreadId::from(id_str.as_str()), next_index))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -900,8 +856,8 @@ impl ThreadsDatabase {
|
||||
let database_future = executor
|
||||
.spawn({
|
||||
let executor = executor.clone();
|
||||
let database_path = paths::data_dir().join("threads/threads-db.1.mdb");
|
||||
async move { ThreadsDatabase::new(database_path, executor) }
|
||||
let threads_dir = paths::data_dir().join("threads");
|
||||
async move { ThreadsDatabase::new(threads_dir, executor) }
|
||||
})
|
||||
.then(|result| future::ready(result.map(Arc::new).map_err(Arc::new)))
|
||||
.boxed()
|
||||
@@ -910,41 +866,144 @@ impl ThreadsDatabase {
|
||||
cx.set_global(GlobalThreadsDatabase(database_future));
|
||||
}
|
||||
|
||||
pub fn new(path: PathBuf, executor: BackgroundExecutor) -> Result<Self> {
|
||||
std::fs::create_dir_all(&path)?;
|
||||
pub fn new(threads_dir: PathBuf, executor: BackgroundExecutor) -> Result<Self> {
|
||||
std::fs::create_dir_all(&threads_dir)?;
|
||||
|
||||
let sqlite_path = threads_dir.join("threads.db");
|
||||
let mdb_path = threads_dir.join("threads-db.1.mdb");
|
||||
|
||||
let needs_migration_from_heed = mdb_path.exists();
|
||||
|
||||
let connection = Connection::open_file(&sqlite_path.to_string_lossy());
|
||||
|
||||
connection.exec(indoc! {"
|
||||
CREATE TABLE IF NOT EXISTS threads (
|
||||
id TEXT PRIMARY KEY,
|
||||
summary TEXT NOT NULL,
|
||||
updated_at TEXT NOT NULL,
|
||||
data_type TEXT NOT NULL,
|
||||
data BLOB NOT NULL
|
||||
)
|
||||
"})?()
|
||||
.map_err(|e| anyhow!("Failed to create threads table: {}", e))?;
|
||||
|
||||
let db = Self {
|
||||
executor: executor.clone(),
|
||||
connection: Arc::new(Mutex::new(connection)),
|
||||
};
|
||||
|
||||
if needs_migration_from_heed {
|
||||
let db_connection = db.connection();
|
||||
let executor_clone = executor.clone();
|
||||
executor
|
||||
.spawn(async move {
|
||||
log::info!("Starting threads.db migration");
|
||||
Self::migrate_from_heed(&mdb_path, db_connection, executor_clone)?;
|
||||
std::fs::remove_dir_all(mdb_path)?;
|
||||
log::info!("threads.db migrated to sqlite");
|
||||
Ok::<(), anyhow::Error>(())
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
// Remove this migration after 2025-09-01
|
||||
fn migrate_from_heed(
|
||||
mdb_path: &Path,
|
||||
connection: Arc<Mutex<Connection>>,
|
||||
_executor: BackgroundExecutor,
|
||||
) -> Result<()> {
|
||||
use heed::types::SerdeBincode;
|
||||
struct SerializedThreadHeed(SerializedThread);
|
||||
|
||||
impl heed::BytesEncode<'_> for SerializedThreadHeed {
|
||||
type EItem = SerializedThreadHeed;
|
||||
|
||||
fn bytes_encode(
|
||||
item: &Self::EItem,
|
||||
) -> Result<std::borrow::Cow<'_, [u8]>, heed::BoxedError> {
|
||||
serde_json::to_vec(&item.0)
|
||||
.map(std::borrow::Cow::Owned)
|
||||
.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> heed::BytesDecode<'a> for SerializedThreadHeed {
|
||||
type DItem = SerializedThreadHeed;
|
||||
|
||||
fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, heed::BoxedError> {
|
||||
SerializedThread::from_json(bytes)
|
||||
.map(SerializedThreadHeed)
|
||||
.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
const ONE_GB_IN_BYTES: usize = 1024 * 1024 * 1024;
|
||||
|
||||
let env = unsafe {
|
||||
heed::EnvOpenOptions::new()
|
||||
.map_size(ONE_GB_IN_BYTES)
|
||||
.max_dbs(1)
|
||||
.open(path)?
|
||||
.open(mdb_path)?
|
||||
};
|
||||
|
||||
let mut txn = env.write_txn()?;
|
||||
let threads = env.create_database(&mut txn, Some("threads"))?;
|
||||
txn.commit()?;
|
||||
let txn = env.write_txn()?;
|
||||
let threads: heed::Database<SerdeBincode<ThreadId>, SerializedThreadHeed> = env
|
||||
.open_database(&txn, Some("threads"))?
|
||||
.ok_or_else(|| anyhow!("threads database not found"))?;
|
||||
|
||||
Ok(Self {
|
||||
executor,
|
||||
env,
|
||||
threads,
|
||||
})
|
||||
for result in threads.iter(&txn)? {
|
||||
let (thread_id, thread_heed) = result?;
|
||||
Self::save_thread_sync(&connection, thread_id, thread_heed.0)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn save_thread_sync(
connection: &Arc<Mutex<Connection>>,
id: ThreadId,
thread: SerializedThread,
) -> Result<()> {
let json_data = serde_json::to_string(&thread)?;
let summary = thread.summary.to_string();
let updated_at = thread.updated_at.to_rfc3339();

let connection = connection.lock().unwrap();

let compressed = zstd::encode_all(json_data.as_bytes(), Self::COMPRESSION_LEVEL)?;
let data_type = DataType::Zstd;
let data = compressed;

let mut insert = connection.exec_bound::<(ThreadId, String, String, DataType, Vec<u8>)>(indoc! {"
INSERT OR REPLACE INTO threads (id, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?)
"})?;

insert((id, summary, updated_at, data_type, data))?;

Ok(())
}
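A minimal round-trip sketch of the compression used above (hedged; it uses only the zstd calls already present in this diff, with level 3 matching COMPRESSION_LEVEL):

    // Hedged sketch, not part of the diff: compress and decompress a thread payload.
    fn roundtrip(json_data: &str) -> anyhow::Result<String> {
        let compressed = zstd::encode_all(json_data.as_bytes(), 3)?;
        let decompressed = zstd::decode_all(compressed.as_slice())?;
        Ok(String::from_utf8(decompressed)?)
    }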
|
||||
|
||||
pub fn list_threads(&self) -> Task<Result<Vec<SerializedThreadMetadata>>> {
let env = self.env.clone();
let threads = self.threads;
let connection = self.connection.clone();

self.executor.spawn(async move {
let txn = env.read_txn()?;
let mut iter = threads.iter(&txn)?;
let connection = connection.lock().unwrap();
let mut select =
connection.select_bound::<(), (ThreadId, String, String)>(indoc! {"
SELECT id, summary, updated_at FROM threads ORDER BY updated_at DESC
"})?;

let rows = select(())?;
let mut threads = Vec::new();
while let Some((key, value)) = iter.next().transpose()? {

for (id, summary, updated_at) in rows {
threads.push(SerializedThreadMetadata {
id: key,
summary: value.summary,
updated_at: value.updated_at,
id,
summary: summary.into(),
updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc),
});
}
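A hedged aside on the timestamp handling above: updated_at is written with to_rfc3339 and read back with DateTime::parse_from_rfc3339, so the value round-trips losslessly (chrono's DateTime and Utc are already imported in this file).

    // Hedged sketch, not part of the diff.
    let now = Utc::now();
    let stored = now.to_rfc3339();
    let loaded = DateTime::parse_from_rfc3339(&stored).unwrap().with_timezone(&Utc);
    assert_eq!(loaded, now);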
|
||||
|
||||
@@ -953,37 +1012,230 @@ impl ThreadsDatabase {
|
||||
}
|
||||
|
||||
pub fn try_find_thread(&self, id: ThreadId) -> Task<Result<Option<SerializedThread>>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
let connection = self.connection.clone();
|
||||
|
||||
self.executor.spawn(async move {
|
||||
let txn = env.read_txn()?;
|
||||
let thread = threads.get(&txn, &id)?;
|
||||
Ok(thread)
|
||||
let connection = connection.lock().unwrap();
|
||||
let mut select = connection.select_bound::<ThreadId, (DataType, Vec<u8>)>(indoc! {"
|
||||
SELECT data_type, data FROM threads WHERE id = ? LIMIT 1
|
||||
"})?;
|
||||
|
||||
let rows = select(id)?;
|
||||
if let Some((data_type, data)) = rows.into_iter().next() {
|
||||
let json_data = match data_type {
|
||||
DataType::Zstd => {
|
||||
let decompressed = zstd::decode_all(&data[..])?;
|
||||
String::from_utf8(decompressed)?
|
||||
}
|
||||
DataType::Json => String::from_utf8(data)?,
|
||||
};
|
||||
|
||||
let thread = SerializedThread::from_json(json_data.as_bytes())?;
|
||||
Ok(Some(thread))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn save_thread(&self, id: ThreadId, thread: SerializedThread) -> Task<Result<()>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
let connection = self.connection.clone();
|
||||
|
||||
self.executor.spawn(async move {
|
||||
let mut txn = env.write_txn()?;
|
||||
threads.put(&mut txn, &id, &thread)?;
|
||||
txn.commit()?;
|
||||
Ok(())
|
||||
})
|
||||
self.executor
|
||||
.spawn(async move { Self::save_thread_sync(&connection, id, thread) })
|
||||
}
|
||||
|
||||
pub fn delete_thread(&self, id: ThreadId) -> Task<Result<()>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
let connection = self.connection.clone();
|
||||
|
||||
self.executor.spawn(async move {
|
||||
let mut txn = env.write_txn()?;
|
||||
threads.delete(&mut txn, &id)?;
|
||||
txn.commit()?;
|
||||
let connection = connection.lock().unwrap();
|
||||
|
||||
let mut delete = connection.exec_bound::<ThreadId>(indoc! {"
|
||||
DELETE FROM threads WHERE id = ?
|
||||
"})?;
|
||||
|
||||
delete(id)?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::thread::{DetailedSummaryState, MessageId};
|
||||
use chrono::Utc;
|
||||
use language_model::{Role, TokenUsage};
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn test_legacy_serialized_thread_upgrade() {
|
||||
let updated_at = Utc::now();
|
||||
let legacy_thread = LegacySerializedThread {
|
||||
summary: "Test conversation".into(),
|
||||
updated_at,
|
||||
messages: vec![LegacySerializedMessage {
|
||||
id: MessageId(1),
|
||||
role: Role::User,
|
||||
text: "Hello, world!".to_string(),
|
||||
tool_uses: vec![],
|
||||
tool_results: vec![],
|
||||
}],
|
||||
initial_project_snapshot: None,
|
||||
};
|
||||
|
||||
let upgraded = legacy_thread.upgrade();
|
||||
|
||||
assert_eq!(
|
||||
upgraded,
|
||||
SerializedThread {
|
||||
summary: "Test conversation".into(),
|
||||
updated_at,
|
||||
messages: vec![SerializedMessage {
|
||||
id: MessageId(1),
|
||||
role: Role::User,
|
||||
segments: vec![SerializedMessageSegment::Text {
|
||||
text: "Hello, world!".to_string()
|
||||
}],
|
||||
tool_uses: vec![],
|
||||
tool_results: vec![],
|
||||
context: "".to_string(),
|
||||
creases: vec![],
|
||||
is_hidden: false
|
||||
}],
|
||||
version: SerializedThread::VERSION.to_string(),
|
||||
initial_project_snapshot: None,
|
||||
cumulative_token_usage: TokenUsage::default(),
|
||||
request_token_usage: vec![],
|
||||
detailed_summary_state: DetailedSummaryState::default(),
|
||||
exceeded_window_error: None,
|
||||
model: None,
|
||||
completion_mode: None,
|
||||
tool_use_limit_reached: false,
|
||||
profile: None
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_serialized_threadv0_1_0_upgrade() {
|
||||
let updated_at = Utc::now();
|
||||
let thread_v0_1_0 = SerializedThreadV0_1_0(SerializedThread {
|
||||
summary: "Test conversation".into(),
|
||||
updated_at,
|
||||
messages: vec![
|
||||
SerializedMessage {
|
||||
id: MessageId(1),
|
||||
role: Role::User,
|
||||
segments: vec![SerializedMessageSegment::Text {
|
||||
text: "Use tool_1".to_string(),
|
||||
}],
|
||||
tool_uses: vec![],
|
||||
tool_results: vec![],
|
||||
context: "".to_string(),
|
||||
creases: vec![],
|
||||
is_hidden: false,
|
||||
},
|
||||
SerializedMessage {
|
||||
id: MessageId(2),
|
||||
role: Role::Assistant,
|
||||
segments: vec![SerializedMessageSegment::Text {
|
||||
text: "I want to use a tool".to_string(),
|
||||
}],
|
||||
tool_uses: vec![SerializedToolUse {
|
||||
id: "abc".into(),
|
||||
name: "tool_1".into(),
|
||||
input: serde_json::Value::Null,
|
||||
}],
|
||||
tool_results: vec![],
|
||||
context: "".to_string(),
|
||||
creases: vec![],
|
||||
is_hidden: false,
|
||||
},
|
||||
SerializedMessage {
|
||||
id: MessageId(1),
|
||||
role: Role::User,
|
||||
segments: vec![SerializedMessageSegment::Text {
|
||||
text: "Here is the tool result".to_string(),
|
||||
}],
|
||||
tool_uses: vec![],
|
||||
tool_results: vec![SerializedToolResult {
|
||||
tool_use_id: "abc".into(),
|
||||
is_error: false,
|
||||
content: LanguageModelToolResultContent::Text("abcdef".into()),
|
||||
output: Some(serde_json::Value::Null),
|
||||
}],
|
||||
context: "".to_string(),
|
||||
creases: vec![],
|
||||
is_hidden: false,
|
||||
},
|
||||
],
|
||||
version: SerializedThreadV0_1_0::VERSION.to_string(),
|
||||
initial_project_snapshot: None,
|
||||
cumulative_token_usage: TokenUsage::default(),
|
||||
request_token_usage: vec![],
|
||||
detailed_summary_state: DetailedSummaryState::default(),
|
||||
exceeded_window_error: None,
|
||||
model: None,
|
||||
completion_mode: None,
|
||||
tool_use_limit_reached: false,
|
||||
profile: None,
|
||||
});
|
||||
let upgraded = thread_v0_1_0.upgrade();
|
||||
|
||||
assert_eq!(
|
||||
upgraded,
|
||||
SerializedThread {
|
||||
summary: "Test conversation".into(),
|
||||
updated_at,
|
||||
messages: vec![
|
||||
SerializedMessage {
|
||||
id: MessageId(1),
|
||||
role: Role::User,
|
||||
segments: vec![SerializedMessageSegment::Text {
|
||||
text: "Use tool_1".to_string()
|
||||
}],
|
||||
tool_uses: vec![],
|
||||
tool_results: vec![],
|
||||
context: "".to_string(),
|
||||
creases: vec![],
|
||||
is_hidden: false
|
||||
},
|
||||
SerializedMessage {
|
||||
id: MessageId(2),
|
||||
role: Role::Assistant,
|
||||
segments: vec![SerializedMessageSegment::Text {
|
||||
text: "I want to use a tool".to_string(),
|
||||
}],
|
||||
tool_uses: vec![SerializedToolUse {
|
||||
id: "abc".into(),
|
||||
name: "tool_1".into(),
|
||||
input: serde_json::Value::Null,
|
||||
}],
|
||||
tool_results: vec![SerializedToolResult {
|
||||
tool_use_id: "abc".into(),
|
||||
is_error: false,
|
||||
content: LanguageModelToolResultContent::Text("abcdef".into()),
|
||||
output: Some(serde_json::Value::Null),
|
||||
}],
|
||||
context: "".to_string(),
|
||||
creases: vec![],
|
||||
is_hidden: false,
|
||||
},
|
||||
],
|
||||
version: SerializedThread::VERSION.to_string(),
|
||||
initial_project_snapshot: None,
|
||||
cumulative_token_usage: TokenUsage::default(),
|
||||
request_token_usage: vec![],
|
||||
detailed_summary_state: DetailedSummaryState::default(),
|
||||
exceeded_window_error: None,
|
||||
model: None,
|
||||
completion_mode: None,
|
||||
tool_use_limit_reached: false,
|
||||
profile: None
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,30 +1,33 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_tool::{Tool, ToolSource, ToolWorkingSet, ToolWorkingSetEvent};
|
||||
use assistant_tool::{Tool, ToolSource};
|
||||
use collections::HashMap;
|
||||
use gpui::{App, Context, Entity, IntoElement, Render, Subscription, Window};
|
||||
use language_model::{LanguageModel, LanguageModelToolSchemaFormat};
|
||||
use ui::prelude::*;
|
||||
|
||||
use crate::{Thread, ThreadEvent};
|
||||
|
||||
pub struct IncompatibleToolsState {
|
||||
cache: HashMap<LanguageModelToolSchemaFormat, Vec<Arc<dyn Tool>>>,
|
||||
tool_working_set: Entity<ToolWorkingSet>,
|
||||
_tool_working_set_subscription: Subscription,
|
||||
thread: Entity<Thread>,
|
||||
_thread_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl IncompatibleToolsState {
|
||||
pub fn new(tool_working_set: Entity<ToolWorkingSet>, cx: &mut Context<Self>) -> Self {
|
||||
pub fn new(thread: Entity<Thread>, cx: &mut Context<Self>) -> Self {
|
||||
let _tool_working_set_subscription =
|
||||
cx.subscribe(&tool_working_set, |this, _, event, _| match event {
|
||||
ToolWorkingSetEvent::EnabledToolsChanged => {
|
||||
cx.subscribe(&thread, |this, _, event, _| match event {
|
||||
ThreadEvent::ProfileChanged => {
|
||||
this.cache.clear();
|
||||
}
|
||||
_ => {}
|
||||
});
|
||||
|
||||
Self {
|
||||
cache: HashMap::default(),
|
||||
tool_working_set,
|
||||
_tool_working_set_subscription,
|
||||
thread,
|
||||
_thread_subscription: _tool_working_set_subscription,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,8 +39,9 @@ impl IncompatibleToolsState {
|
||||
self.cache
|
||||
.entry(model.tool_input_format())
|
||||
.or_insert_with(|| {
|
||||
self.tool_working_set
|
||||
self.thread
|
||||
.read(cx)
|
||||
.profile()
|
||||
.enabled_tools(cx)
|
||||
.iter()
|
||||
.filter(|tool| tool.input_schema(model.tool_input_format()).is_err())
|
||||
|
||||
@@ -337,6 +337,12 @@ impl ToolUseState {
|
||||
)
|
||||
.into();
|
||||
|
||||
let may_perform_edits = self
|
||||
.tools
|
||||
.read(cx)
|
||||
.tool(&tool_use.name, cx)
|
||||
.is_some_and(|tool| tool.may_perform_edits());
|
||||
|
||||
self.pending_tool_uses_by_id.insert(
|
||||
tool_use.id.clone(),
|
||||
PendingToolUse {
|
||||
@@ -345,6 +351,7 @@ impl ToolUseState {
|
||||
name: tool_use.name.clone(),
|
||||
ui_text: ui_text.clone(),
|
||||
input: tool_use.input,
|
||||
may_perform_edits,
|
||||
status,
|
||||
},
|
||||
);
|
||||
@@ -518,6 +525,7 @@ pub struct PendingToolUse {
|
||||
pub ui_text: Arc<str>,
|
||||
pub input: serde_json::Value,
|
||||
pub status: PendingToolUseStatus,
|
||||
pub may_perform_edits: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
||||
@@ -93,20 +93,9 @@ impl ContextPill {
|
||||
Self::Suggested {
|
||||
icon_path: Some(icon_path),
|
||||
..
|
||||
}
|
||||
| Self::Added {
|
||||
context:
|
||||
AddedContext {
|
||||
icon_path: Some(icon_path),
|
||||
..
|
||||
},
|
||||
..
|
||||
} => Icon::from_path(icon_path),
|
||||
Self::Suggested { kind, .. }
|
||||
| Self::Added {
|
||||
context: AddedContext { kind, .. },
|
||||
..
|
||||
} => Icon::new(kind.icon()),
|
||||
Self::Suggested { kind, .. } => Icon::new(kind.icon()),
|
||||
Self::Added { context, .. } => context.icon(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -133,6 +122,7 @@ impl RenderOnce for ContextPill {
|
||||
on_click,
|
||||
} => {
|
||||
let status_is_error = matches!(context.status, ContextStatus::Error { .. });
|
||||
let status_is_warning = matches!(context.status, ContextStatus::Warning { .. });
|
||||
|
||||
base_pill
|
||||
.pr(if on_remove.is_some() { px(2.) } else { px(4.) })
|
||||
@@ -140,6 +130,9 @@ impl RenderOnce for ContextPill {
|
||||
if status_is_error {
|
||||
pill.bg(cx.theme().status().error_background)
|
||||
.border_color(cx.theme().status().error_border)
|
||||
} else if status_is_warning {
|
||||
pill.bg(cx.theme().status().warning_background)
|
||||
.border_color(cx.theme().status().warning_border)
|
||||
} else if *focused {
|
||||
pill.bg(color.element_background)
|
||||
.border_color(color.border_focused)
|
||||
@@ -195,7 +188,8 @@ impl RenderOnce for ContextPill {
|
||||
|label, delta| label.opacity(delta),
|
||||
)
|
||||
.into_any_element(),
|
||||
ContextStatus::Error { message } => element
|
||||
ContextStatus::Warning { message }
|
||||
| ContextStatus::Error { message } => element
|
||||
.tooltip(ui::Tooltip::text(message.clone()))
|
||||
.into_any_element(),
|
||||
}),
|
||||
@@ -270,6 +264,7 @@ pub enum ContextStatus {
|
||||
Ready,
|
||||
Loading { message: SharedString },
|
||||
Error { message: SharedString },
|
||||
Warning { message: SharedString },
|
||||
}
|
||||
|
||||
#[derive(RegisterComponent)]
|
||||
@@ -285,6 +280,19 @@ pub struct AddedContext {
|
||||
}
|
||||
|
||||
impl AddedContext {
|
||||
pub fn icon(&self) -> Icon {
|
||||
match &self.status {
|
||||
ContextStatus::Warning { .. } => Icon::new(IconName::Warning).color(Color::Warning),
|
||||
ContextStatus::Error { .. } => Icon::new(IconName::XCircle).color(Color::Error),
|
||||
_ => {
|
||||
if let Some(icon_path) = &self.icon_path {
|
||||
Icon::from_path(icon_path)
|
||||
} else {
|
||||
Icon::new(self.kind.icon())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Creates an `AddedContext` by retrieving relevant details of `AgentContext`. This returns a
|
||||
/// `None` if `DirectoryContext` or `RulesContext` no longer exist.
|
||||
///
|
||||
@@ -293,6 +301,7 @@ impl AddedContext {
|
||||
handle: AgentContextHandle,
|
||||
prompt_store: Option<&Entity<PromptStore>>,
|
||||
project: &Project,
|
||||
model: Option<&Arc<dyn language_model::LanguageModel>>,
|
||||
cx: &App,
|
||||
) -> Option<AddedContext> {
|
||||
match handle {
|
||||
@@ -304,11 +313,15 @@ impl AddedContext {
|
||||
AgentContextHandle::Thread(handle) => Some(Self::pending_thread(handle, cx)),
|
||||
AgentContextHandle::TextThread(handle) => Some(Self::pending_text_thread(handle, cx)),
|
||||
AgentContextHandle::Rules(handle) => Self::pending_rules(handle, prompt_store, cx),
|
||||
AgentContextHandle::Image(handle) => Some(Self::image(handle)),
|
||||
AgentContextHandle::Image(handle) => Some(Self::image(handle, model, cx)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_attached(context: &AgentContext, cx: &App) -> AddedContext {
|
||||
pub fn new_attached(
|
||||
context: &AgentContext,
|
||||
model: Option<&Arc<dyn language_model::LanguageModel>>,
|
||||
cx: &App,
|
||||
) -> AddedContext {
|
||||
match context {
|
||||
AgentContext::File(context) => Self::attached_file(context, cx),
|
||||
AgentContext::Directory(context) => Self::attached_directory(context),
|
||||
@@ -318,7 +331,7 @@ impl AddedContext {
|
||||
AgentContext::Thread(context) => Self::attached_thread(context),
|
||||
AgentContext::TextThread(context) => Self::attached_text_thread(context),
|
||||
AgentContext::Rules(context) => Self::attached_rules(context),
|
||||
AgentContext::Image(context) => Self::image(context.clone()),
|
||||
AgentContext::Image(context) => Self::image(context.clone(), model, cx),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -333,14 +346,8 @@ impl AddedContext {
|
||||
|
||||
fn file(handle: FileContextHandle, full_path: &Path, cx: &App) -> AddedContext {
|
||||
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
|
||||
let name = full_path
|
||||
.file_name()
|
||||
.map(|n| n.to_string_lossy().into_owned().into())
|
||||
.unwrap_or_else(|| full_path_string.clone());
|
||||
let parent = full_path
|
||||
.parent()
|
||||
.and_then(|p| p.file_name())
|
||||
.map(|n| n.to_string_lossy().into_owned().into());
|
||||
let (name, parent) =
|
||||
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
|
||||
AddedContext {
|
||||
kind: ContextKind::File,
|
||||
name,
|
||||
@@ -370,14 +377,8 @@ impl AddedContext {
|
||||
|
||||
fn directory(handle: DirectoryContextHandle, full_path: &Path) -> AddedContext {
|
||||
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
|
||||
let name = full_path
|
||||
.file_name()
|
||||
.map(|n| n.to_string_lossy().into_owned().into())
|
||||
.unwrap_or_else(|| full_path_string.clone());
|
||||
let parent = full_path
|
||||
.parent()
|
||||
.and_then(|p| p.file_name())
|
||||
.map(|n| n.to_string_lossy().into_owned().into());
|
||||
let (name, parent) =
|
||||
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
|
||||
AddedContext {
|
||||
kind: ContextKind::Directory,
|
||||
name,
|
||||
@@ -605,22 +606,45 @@ impl AddedContext {
|
||||
}
|
||||
}
|
||||
|
||||
fn image(context: ImageContext) -> AddedContext {
|
||||
fn image(
|
||||
context: ImageContext,
|
||||
model: Option<&Arc<dyn language_model::LanguageModel>>,
|
||||
cx: &App,
|
||||
) -> AddedContext {
|
||||
let (name, parent, icon_path) = if let Some(full_path) = context.full_path.as_ref() {
|
||||
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
|
||||
let (name, parent) =
|
||||
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
|
||||
let icon_path = FileIcons::get_icon(&full_path, cx);
|
||||
(name, parent, icon_path)
|
||||
} else {
|
||||
("Image".into(), None, None)
|
||||
};
|
||||
|
||||
let status = match context.status(model) {
|
||||
ImageStatus::Loading => ContextStatus::Loading {
|
||||
message: "Loading…".into(),
|
||||
},
|
||||
ImageStatus::Error => ContextStatus::Error {
|
||||
message: "Failed to load Image".into(),
|
||||
},
|
||||
ImageStatus::Warning => ContextStatus::Warning {
|
||||
message: format!(
|
||||
"{} doesn't support attaching Images as Context",
|
||||
model.map(|m| m.name().0).unwrap_or_else(|| "Model".into())
|
||||
)
|
||||
.into(),
|
||||
},
|
||||
ImageStatus::Ready => ContextStatus::Ready,
|
||||
};
|
||||
|
||||
AddedContext {
|
||||
kind: ContextKind::Image,
|
||||
name: "Image".into(),
|
||||
parent: None,
|
||||
name,
|
||||
parent,
|
||||
tooltip: None,
|
||||
icon_path: None,
|
||||
status: match context.status() {
|
||||
ImageStatus::Loading => ContextStatus::Loading {
|
||||
message: "Loading…".into(),
|
||||
},
|
||||
ImageStatus::Error => ContextStatus::Error {
|
||||
message: "Failed to load image".into(),
|
||||
},
|
||||
ImageStatus::Ready => ContextStatus::Ready,
|
||||
},
|
||||
icon_path,
|
||||
status,
|
||||
render_hover: Some(Rc::new({
|
||||
let image = context.original_image.clone();
|
||||
move |_, cx| {
|
||||
@@ -639,6 +663,22 @@ impl AddedContext {
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_file_name_and_directory_from_full_path(
    path: &Path,
    name_fallback: &SharedString,
) -> (SharedString, Option<SharedString>) {
    let name = path
        .file_name()
        .map(|n| n.to_string_lossy().into_owned().into())
        .unwrap_or_else(|| name_fallback.clone());
    let parent = path
        .parent()
        .and_then(|p| p.file_name())
        .map(|n| n.to_string_lossy().into_owned().into());

    (name, parent)
}
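
A quick usage note for the new helper: it keeps only the file name and the name of the immediate parent directory, not the full parent path. A plain-std sketch of the same behavior (hypothetical function name, `String` instead of `SharedString`):

```rust
use std::path::Path;

// "/home/user/src/main.rs" -> ("main.rs", Some("src"));
// a bare "main.rs"         -> ("main.rs", None).
fn name_and_parent(path: &Path, fallback: &str) -> (String, Option<String>) {
    let name = path
        .file_name()
        .map(|n| n.to_string_lossy().into_owned())
        .unwrap_or_else(|| fallback.to_string());
    let parent = path
        .parent()
        .and_then(|p| p.file_name())
        .map(|n| n.to_string_lossy().into_owned());
    (name, parent)
}
```
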
#[derive(Debug, Clone)]
|
||||
struct ContextFileExcerpt {
|
||||
pub file_name_and_range: SharedString,
|
||||
@@ -765,37 +805,52 @@ impl Component for AddedContext {
|
||||
let mut next_context_id = ContextId::zero();
|
||||
let image_ready = (
|
||||
"Ready",
|
||||
AddedContext::image(ImageContext {
|
||||
context_id: next_context_id.post_inc(),
|
||||
project_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
|
||||
}),
|
||||
AddedContext::image(
|
||||
ImageContext {
|
||||
context_id: next_context_id.post_inc(),
|
||||
project_path: None,
|
||||
full_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
|
||||
},
|
||||
None,
|
||||
cx,
|
||||
),
|
||||
);
|
||||
|
||||
let image_loading = (
|
||||
"Loading",
|
||||
AddedContext::image(ImageContext {
|
||||
context_id: next_context_id.post_inc(),
|
||||
project_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: cx
|
||||
.background_spawn(async move {
|
||||
smol::Timer::after(Duration::from_secs(60 * 5)).await;
|
||||
Some(LanguageModelImage::empty())
|
||||
})
|
||||
.shared(),
|
||||
}),
|
||||
AddedContext::image(
|
||||
ImageContext {
|
||||
context_id: next_context_id.post_inc(),
|
||||
project_path: None,
|
||||
full_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: cx
|
||||
.background_spawn(async move {
|
||||
smol::Timer::after(Duration::from_secs(60 * 5)).await;
|
||||
Some(LanguageModelImage::empty())
|
||||
})
|
||||
.shared(),
|
||||
},
|
||||
None,
|
||||
cx,
|
||||
),
|
||||
);
|
||||
|
||||
let image_error = (
|
||||
"Error",
|
||||
AddedContext::image(ImageContext {
|
||||
context_id: next_context_id.post_inc(),
|
||||
project_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: Task::ready(None).shared(),
|
||||
}),
|
||||
AddedContext::image(
|
||||
ImageContext {
|
||||
context_id: next_context_id.post_inc(),
|
||||
project_path: None,
|
||||
full_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: Task::ready(None).shared(),
|
||||
},
|
||||
None,
|
||||
cx,
|
||||
),
|
||||
);
|
||||
|
||||
Some(
|
||||
@@ -815,3 +870,60 @@ impl Component for AddedContext {
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use gpui::App;
|
||||
use language_model::{LanguageModel, fake_provider::FakeLanguageModel};
|
||||
use std::sync::Arc;
|
||||
|
||||
#[gpui::test]
|
||||
fn test_image_context_warning_for_unsupported_model(cx: &mut App) {
|
||||
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel::default());
|
||||
assert!(!model.supports_images());
|
||||
|
||||
let image_context = ImageContext {
|
||||
context_id: ContextId::zero(),
|
||||
project_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
|
||||
full_path: None,
|
||||
};
|
||||
|
||||
let added_context = AddedContext::image(image_context, Some(&model), cx);
|
||||
|
||||
assert!(matches!(
|
||||
added_context.status,
|
||||
ContextStatus::Warning { .. }
|
||||
));
|
||||
|
||||
assert!(matches!(added_context.kind, ContextKind::Image));
|
||||
assert_eq!(added_context.name.as_ref(), "Image");
|
||||
assert!(added_context.parent.is_none());
|
||||
assert!(added_context.icon_path.is_none());
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_image_context_ready_for_no_model(cx: &mut App) {
|
||||
let image_context = ImageContext {
|
||||
context_id: ContextId::zero(),
|
||||
project_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
|
||||
full_path: None,
|
||||
};
|
||||
|
||||
let added_context = AddedContext::image(image_context, None, cx);
|
||||
|
||||
assert!(
|
||||
matches!(added_context.status, ContextStatus::Ready),
|
||||
"Expected ready status when no model provided"
|
||||
);
|
||||
|
||||
assert!(matches!(added_context.kind, ContextKind::Image));
|
||||
assert_eq!(added_context.name.as_ref(), "Image");
|
||||
assert!(added_context.parent.is_none());
|
||||
assert!(added_context.icon_path.is_none());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,7 +16,6 @@ anthropic = { workspace = true, features = ["schemars"] }
|
||||
anyhow.workspace = true
|
||||
collections.workspace = true
|
||||
gpui.workspace = true
|
||||
indexmap.workspace = true
|
||||
language_model.workspace = true
|
||||
lmstudio = { workspace = true, features = ["schemars"] }
|
||||
log.workspace = true
|
||||
|
||||
@@ -17,29 +17,6 @@ pub mod builtin_profiles {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct GroupedAgentProfiles {
|
||||
pub builtin: IndexMap<AgentProfileId, AgentProfile>,
|
||||
pub custom: IndexMap<AgentProfileId, AgentProfile>,
|
||||
}
|
||||
|
||||
impl GroupedAgentProfiles {
|
||||
pub fn from_settings(settings: &crate::AgentSettings) -> Self {
|
||||
let mut builtin = IndexMap::default();
|
||||
let mut custom = IndexMap::default();
|
||||
|
||||
for (profile_id, profile) in settings.profiles.clone() {
|
||||
if builtin_profiles::is_builtin(&profile_id) {
|
||||
builtin.insert(profile_id, profile);
|
||||
} else {
|
||||
custom.insert(profile_id, profile);
|
||||
}
|
||||
}
|
||||
|
||||
Self { builtin, custom }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct AgentProfileId(pub Arc<str>);
|
||||
|
||||
@@ -63,7 +40,7 @@ impl Default for AgentProfileId {
|
||||
|
||||
/// A profile for the Zed Agent that controls its behavior.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AgentProfile {
|
||||
pub struct AgentProfileSettings {
|
||||
/// The name of the profile.
|
||||
pub name: SharedString,
|
||||
pub tools: IndexMap<Arc<str>, bool>,
|
||||
|
||||
@@ -102,7 +102,7 @@ pub struct AgentSettings {
|
||||
pub using_outdated_settings_version: bool,
|
||||
pub default_profile: AgentProfileId,
|
||||
pub default_view: DefaultView,
|
||||
pub profiles: IndexMap<AgentProfileId, AgentProfile>,
|
||||
pub profiles: IndexMap<AgentProfileId, AgentProfileSettings>,
|
||||
pub always_allow_tool_actions: bool,
|
||||
pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
|
||||
pub play_sound_when_agent_done: bool,
|
||||
@@ -372,6 +372,8 @@ impl AgentSettingsContent {
|
||||
None,
|
||||
None,
|
||||
Some(language_model.supports_tools()),
|
||||
Some(language_model.supports_images()),
|
||||
None,
|
||||
)),
|
||||
api_url,
|
||||
});
|
||||
@@ -529,7 +531,7 @@ impl AgentSettingsContent {
|
||||
pub fn create_profile(
|
||||
&mut self,
|
||||
profile_id: AgentProfileId,
|
||||
profile: AgentProfile,
|
||||
profile_settings: AgentProfileSettings,
|
||||
) -> Result<()> {
|
||||
self.v2_setting(|settings| {
|
||||
let profiles = settings.profiles.get_or_insert_default();
|
||||
@@ -540,10 +542,10 @@ impl AgentSettingsContent {
|
||||
profiles.insert(
|
||||
profile_id,
|
||||
AgentProfileContent {
|
||||
name: profile.name.into(),
|
||||
tools: profile.tools,
|
||||
enable_all_context_servers: Some(profile.enable_all_context_servers),
|
||||
context_servers: profile
|
||||
name: profile_settings.name.into(),
|
||||
tools: profile_settings.tools,
|
||||
enable_all_context_servers: Some(profile_settings.enable_all_context_servers),
|
||||
context_servers: profile_settings
|
||||
.context_servers
|
||||
.into_iter()
|
||||
.map(|(server_id, preset)| {
|
||||
@@ -689,6 +691,7 @@ pub struct AgentSettingsContentV2 {
|
||||
pub enum CompletionMode {
|
||||
#[default]
|
||||
Normal,
|
||||
#[serde(alias = "max")]
|
||||
Burn,
|
||||
}
|
||||
|
||||
@@ -727,6 +730,7 @@ impl JsonSchema for LanguageModelProviderSetting {
|
||||
"zed.dev".into(),
|
||||
"copilot_chat".into(),
|
||||
"deepseek".into(),
|
||||
"openrouter".into(),
|
||||
"mistral".into(),
|
||||
]),
|
||||
..Default::default()
|
||||
@@ -906,7 +910,7 @@ impl Settings for AgentSettings {
|
||||
.extend(profiles.into_iter().map(|(id, profile)| {
|
||||
(
|
||||
id,
|
||||
AgentProfile {
|
||||
AgentProfileSettings {
|
||||
name: profile.name.into(),
|
||||
tools: profile.tools,
|
||||
enable_all_context_servers: profile
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use std::str::FromStr;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use chrono::{DateTime, Utc};
|
||||
@@ -406,6 +407,7 @@ impl RateLimit {
|
||||
/// <https://docs.anthropic.com/en/api/rate-limits#response-headers>
|
||||
#[derive(Debug)]
|
||||
pub struct RateLimitInfo {
|
||||
pub retry_after: Option<Duration>,
|
||||
pub requests: Option<RateLimit>,
|
||||
pub tokens: Option<RateLimit>,
|
||||
pub input_tokens: Option<RateLimit>,
|
||||
@@ -417,10 +419,11 @@ impl RateLimitInfo {
        // Check if any rate limit headers exist
        let has_rate_limit_headers = headers
            .keys()
            .any(|k| k.as_str().starts_with("anthropic-ratelimit-"));
            .any(|k| k == "retry-after" || k.as_str().starts_with("anthropic-ratelimit-"));

        if !has_rate_limit_headers {
            return Self {
                retry_after: None,
                requests: None,
                tokens: None,
                input_tokens: None,
@@ -429,6 +432,11 @@ impl RateLimitInfo {
        }

        Self {
            retry_after: headers
                .get("retry-after")
                .and_then(|v| v.to_str().ok())
                .and_then(|v| v.parse::<u64>().ok())
                .map(Duration::from_secs),
            requests: RateLimit::from_headers("requests", headers).ok(),
            tokens: RateLimit::from_headers("tokens", headers).ok(),
            input_tokens: RateLimit::from_headers("input-tokens", headers).ok(),
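
The chain above treats `retry-after` as a whole number of seconds; the HTTP-date form of the header is not handled. A standalone sketch of that conversion (hypothetical helper, no `http` header types):

```rust
use std::time::Duration;

// Parse a Retry-After value such as "30" into a Duration.
// Non-numeric or missing values yield None, matching the integer-only
// parse in the diff above.
fn parse_retry_after(value: Option<&str>) -> Option<Duration> {
    value?.trim().parse::<u64>().ok().map(Duration::from_secs)
}
```

Later in this file, a non-success response that carries this value is surfaced as `AnthropicError::RateLimit(retry_after)` rather than a generic API error.
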
@@ -481,8 +489,8 @@ pub async fn stream_completion_with_rate_limit_info(
|
||||
.send(request)
|
||||
.await
|
||||
.context("failed to send request to Anthropic")?;
|
||||
let rate_limits = RateLimitInfo::from_headers(response.headers());
|
||||
if response.status().is_success() {
|
||||
let rate_limits = RateLimitInfo::from_headers(response.headers());
|
||||
let reader = BufReader::new(response.into_body());
|
||||
let stream = reader
|
||||
.lines()
|
||||
@@ -500,6 +508,8 @@ pub async fn stream_completion_with_rate_limit_info(
|
||||
})
|
||||
.boxed();
|
||||
Ok((stream, Some(rate_limits)))
|
||||
} else if let Some(retry_after) = rate_limits.retry_after {
|
||||
Err(AnthropicError::RateLimit(retry_after))
|
||||
} else {
|
||||
let mut body = Vec::new();
|
||||
response
|
||||
@@ -769,6 +779,8 @@ pub struct MessageDelta {
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum AnthropicError {
|
||||
#[error("rate limit exceeded, retry after {0:?}")]
|
||||
RateLimit(Duration),
|
||||
#[error("an error occurred while interacting with the Anthropic API: {error_type}: {message}", error_type = .0.error_type, message = .0.message)]
|
||||
ApiError(ApiError),
|
||||
#[error("{0}")]
|
||||
|
||||
@@ -60,6 +60,7 @@ zed_actions.workspace = true
|
||||
zed_llm_client.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
indoc.workspace = true
|
||||
language_model = { workspace = true, features = ["test-support"] }
|
||||
languages = { workspace = true, features = ["test-support"] }
|
||||
pretty_assertions.workspace = true
|
||||
|
||||
@@ -11,7 +11,7 @@ use assistant_slash_commands::FileCommandMetadata;
|
||||
use client::{self, proto, telemetry::Telemetry};
|
||||
use clock::ReplicaId;
|
||||
use collections::{HashMap, HashSet};
|
||||
use fs::{Fs, RemoveOptions};
|
||||
use fs::{Fs, RenameOptions};
|
||||
use futures::{FutureExt, StreamExt, future::Shared};
|
||||
use gpui::{
|
||||
App, AppContext as _, Context, Entity, EventEmitter, RenderImage, SharedString, Subscription,
|
||||
@@ -452,6 +452,10 @@ pub enum ContextEvent {
|
||||
MessagesEdited,
|
||||
SummaryChanged,
|
||||
SummaryGenerated,
|
||||
PathChanged {
|
||||
old_path: Option<Arc<Path>>,
|
||||
new_path: Arc<Path>,
|
||||
},
|
||||
StreamedCompletion,
|
||||
StartedThoughtProcess(Range<language::Anchor>),
|
||||
EndedThoughtProcess(language::Anchor),
|
||||
@@ -2894,22 +2898,34 @@ impl AssistantContext {
        }

        fs.create_dir(contexts_dir().as_ref()).await?;
        fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
            .await?;
        if let Some(old_path) = old_path {

        // rename before write ensures that only one file exists
        if let Some(old_path) = old_path.as_ref() {
            if new_path.as_path() != old_path.as_ref() {
                fs.remove_file(
                fs.rename(
                    &old_path,
                    RemoveOptions {
                        recursive: false,
                        ignore_if_not_exists: true,
                    &new_path,
                    RenameOptions {
                        overwrite: true,
                        ignore_if_exists: true,
                    },
                )
                .await?;
            }
        }

        this.update(cx, |this, _| this.path = Some(new_path.into()))?;
        // update path before write in case it fails
        this.update(cx, {
            let new_path: Arc<Path> = new_path.clone().into();
            move |this, cx| {
                this.path = Some(new_path.clone());
                cx.emit(ContextEvent::PathChanged { old_path, new_path });
            }
        })
        .ok();

        fs.atomic_write(new_path, serde_json::to_string(&context).unwrap())
            .await?;
        }

        Ok(())
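
The reordering above renames the previously saved file onto the new title-derived path before rewriting its contents, so at most one copy of a context exists on disk at any point. A rough sketch of that sequence with plain `std::fs` (hypothetical paths; the real code goes through the async `Fs` trait and `RenameOptions`, and propagates errors rather than ignoring them):

```rust
use std::{fs, io, path::Path};

// Sketch only: move the old file into place first, then overwrite it.
// A crash between the two steps leaves a single (possibly stale) file
// rather than both an old and a new copy.
fn save_renamed(old_path: Option<&Path>, new_path: &Path, contents: &str) -> io::Result<()> {
    if let Some(old_path) = old_path {
        if old_path != new_path {
            // Best effort here; the diff uses RenameOptions { overwrite: true,
            // ignore_if_exists: true } and propagates other failures.
            let _ = fs::rename(old_path, new_path);
        }
    }
    fs::write(new_path, contents)
}
```
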
@@ -3277,7 +3293,7 @@ impl SavedContextV0_1_0 {
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SavedContextMetadata {
|
||||
pub title: String,
|
||||
pub title: SharedString,
|
||||
pub path: Arc<Path>,
|
||||
pub mtime: chrono::DateTime<chrono::Local>,
|
||||
}
|
||||
|
||||
@@ -580,6 +580,7 @@ impl ContextEditor {
|
||||
});
|
||||
}
|
||||
ContextEvent::SummaryGenerated => {}
|
||||
ContextEvent::PathChanged { .. } => {}
|
||||
ContextEvent::StartedThoughtProcess(range) => {
|
||||
let creases = self.insert_thought_process_output_sections(
|
||||
[(
|
||||
@@ -1646,34 +1647,35 @@ impl ContextEditor {
|
||||
let context = self.context.read(cx);
|
||||
|
||||
let mut text = String::new();
|
||||
for message in context.messages(cx) {
|
||||
if message.offset_range.start >= selection.range().end {
|
||||
break;
|
||||
} else if message.offset_range.end >= selection.range().start {
|
||||
let range = cmp::max(message.offset_range.start, selection.range().start)
|
||||
..cmp::min(message.offset_range.end, selection.range().end);
|
||||
if range.is_empty() {
|
||||
let snapshot = context.buffer().read(cx).snapshot();
|
||||
let point = snapshot.offset_to_point(range.start);
|
||||
selection.start = snapshot.point_to_offset(Point::new(point.row, 0));
|
||||
selection.end = snapshot.point_to_offset(cmp::min(
|
||||
Point::new(point.row + 1, 0),
|
||||
snapshot.max_point(),
|
||||
));
|
||||
for chunk in context.buffer().read(cx).text_for_range(selection.range()) {
|
||||
text.push_str(chunk);
|
||||
}
|
||||
} else {
|
||||
for chunk in context.buffer().read(cx).text_for_range(range) {
|
||||
text.push_str(chunk);
|
||||
}
|
||||
if message.offset_range.end < selection.range().end {
|
||||
text.push('\n');
|
||||
|
||||
// If selection is empty, we want to copy the entire line
|
||||
if selection.range().is_empty() {
|
||||
let snapshot = context.buffer().read(cx).snapshot();
|
||||
let point = snapshot.offset_to_point(selection.range().start);
|
||||
selection.start = snapshot.point_to_offset(Point::new(point.row, 0));
|
||||
selection.end = snapshot
|
||||
.point_to_offset(cmp::min(Point::new(point.row + 1, 0), snapshot.max_point()));
|
||||
for chunk in context.buffer().read(cx).text_for_range(selection.range()) {
|
||||
text.push_str(chunk);
|
||||
}
|
||||
} else {
|
||||
for message in context.messages(cx) {
|
||||
if message.offset_range.start >= selection.range().end {
|
||||
break;
|
||||
} else if message.offset_range.end >= selection.range().start {
|
||||
let range = cmp::max(message.offset_range.start, selection.range().start)
|
||||
..cmp::min(message.offset_range.end, selection.range().end);
|
||||
if !range.is_empty() {
|
||||
for chunk in context.buffer().read(cx).text_for_range(range) {
|
||||
text.push_str(chunk);
|
||||
}
|
||||
if message.offset_range.end < selection.range().end {
|
||||
text.push('\n');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
(text, CopyMetadata { creases }, vec![selection])
|
||||
}
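
The new empty-selection branch above widens the selection to the whole line before copying. The same offset arithmetic on a plain string, as a sketch (hypothetical helper, no editor snapshot types; assumes the offset falls on a char boundary):

```rust
use std::ops::Range;

// Byte range of the line containing `offset`, including the trailing
// newline when there is one; this is the range the empty-selection copy
// path selects.
fn whole_line_range(text: &str, offset: usize) -> Range<usize> {
    let start = text[..offset].rfind('\n').map_or(0, |i| i + 1);
    let end = text[offset..]
        .find('\n')
        .map_or(text.len(), |i| offset + i + 1);
    start..end
}
```

For `"abc\ndef\nghi"` with the cursor anywhere in `def`, this yields the range covering `"def\n"`, matching the `Point::new(row, 0)..Point::new(row + 1, 0)` clamp used by the editor version.
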
@@ -3264,74 +3266,92 @@ mod tests {
|
||||
use super::*;
|
||||
use fs::FakeFs;
|
||||
use gpui::{App, TestAppContext, VisualTestContext};
|
||||
use indoc::indoc;
|
||||
use language::{Buffer, LanguageRegistry};
|
||||
use pretty_assertions::assert_eq;
|
||||
use prompt_store::PromptBuilder;
|
||||
use text::OffsetRangeExt;
|
||||
use unindent::Unindent;
|
||||
use util::path;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_copy_paste_whole_message(cx: &mut TestAppContext) {
|
||||
let (context, context_editor, mut cx) = setup_context_editor_text(vec![
|
||||
(Role::User, "What is the Zed editor?"),
|
||||
(
|
||||
Role::Assistant,
|
||||
"Zed is a modern, high-performance code editor designed from the ground up for speed and collaboration.",
|
||||
),
|
||||
(Role::User, ""),
|
||||
],cx).await;
|
||||
|
||||
// Select & Copy whole user message
|
||||
assert_copy_paste_context_editor(
|
||||
&context_editor,
|
||||
message_range(&context, 0, &mut cx),
|
||||
indoc! {"
|
||||
What is the Zed editor?
|
||||
Zed is a modern, high-performance code editor designed from the ground up for speed and collaboration.
|
||||
What is the Zed editor?
|
||||
"},
|
||||
&mut cx,
|
||||
);
|
||||
|
||||
// Select & Copy whole assistant message
|
||||
assert_copy_paste_context_editor(
|
||||
&context_editor,
|
||||
message_range(&context, 1, &mut cx),
|
||||
indoc! {"
|
||||
What is the Zed editor?
|
||||
Zed is a modern, high-performance code editor designed from the ground up for speed and collaboration.
|
||||
What is the Zed editor?
|
||||
Zed is a modern, high-performance code editor designed from the ground up for speed and collaboration.
|
||||
"},
|
||||
&mut cx,
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_copy_paste_no_selection(cx: &mut TestAppContext) {
|
||||
cx.update(init_test);
|
||||
let (context, context_editor, mut cx) = setup_context_editor_text(
|
||||
vec![
|
||||
(Role::User, "user1"),
|
||||
(Role::Assistant, "assistant1"),
|
||||
(Role::Assistant, "assistant2"),
|
||||
(Role::User, ""),
|
||||
],
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let registry = Arc::new(LanguageRegistry::test(cx.executor()));
|
||||
let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
|
||||
let context = cx.new(|cx| {
|
||||
AssistantContext::local(
|
||||
registry,
|
||||
None,
|
||||
None,
|
||||
prompt_builder.clone(),
|
||||
Arc::new(SlashCommandWorkingSet::default()),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
|
||||
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let workspace = window.root(cx).unwrap();
|
||||
let cx = &mut VisualTestContext::from_window(*window, cx);
|
||||
// Copy and paste first assistant message
|
||||
let message_2_range = message_range(&context, 1, &mut cx);
|
||||
assert_copy_paste_context_editor(
|
||||
&context_editor,
|
||||
message_2_range.start..message_2_range.start,
|
||||
indoc! {"
|
||||
user1
|
||||
assistant1
|
||||
assistant2
|
||||
assistant1
|
||||
"},
|
||||
&mut cx,
|
||||
);
|
||||
|
||||
let context_editor = window
|
||||
.update(cx, |_, window, cx| {
|
||||
cx.new(|cx| {
|
||||
ContextEditor::for_context(
|
||||
context,
|
||||
fs,
|
||||
workspace.downgrade(),
|
||||
project,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
context_editor.update_in(cx, |context_editor, window, cx| {
|
||||
context_editor.editor.update(cx, |editor, cx| {
|
||||
editor.set_text("abc\ndef\nghi", window, cx);
|
||||
editor.move_to_beginning(&Default::default(), window, cx);
|
||||
})
|
||||
});
|
||||
|
||||
context_editor.update_in(cx, |context_editor, window, cx| {
|
||||
context_editor.editor.update(cx, |editor, cx| {
|
||||
editor.copy(&Default::default(), window, cx);
|
||||
editor.paste(&Default::default(), window, cx);
|
||||
|
||||
assert_eq!(editor.text(cx), "abc\nabc\ndef\nghi");
|
||||
})
|
||||
});
|
||||
|
||||
context_editor.update_in(cx, |context_editor, window, cx| {
|
||||
context_editor.editor.update(cx, |editor, cx| {
|
||||
editor.cut(&Default::default(), window, cx);
|
||||
assert_eq!(editor.text(cx), "abc\ndef\nghi");
|
||||
|
||||
editor.paste(&Default::default(), window, cx);
|
||||
assert_eq!(editor.text(cx), "abc\nabc\ndef\nghi");
|
||||
})
|
||||
});
|
||||
// Copy and cut second assistant message
|
||||
let message_3_range = message_range(&context, 2, &mut cx);
|
||||
assert_copy_paste_context_editor(
|
||||
&context_editor,
|
||||
message_3_range.start..message_3_range.start,
|
||||
indoc! {"
|
||||
user1
|
||||
assistant1
|
||||
assistant2
|
||||
assistant1
|
||||
assistant2
|
||||
"},
|
||||
&mut cx,
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -3408,6 +3428,129 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
async fn setup_context_editor_text(
|
||||
messages: Vec<(Role, &str)>,
|
||||
cx: &mut TestAppContext,
|
||||
) -> (
|
||||
Entity<AssistantContext>,
|
||||
Entity<ContextEditor>,
|
||||
VisualTestContext,
|
||||
) {
|
||||
cx.update(init_test);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let context = create_context_with_messages(messages, cx);
|
||||
|
||||
let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
|
||||
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let workspace = window.root(cx).unwrap();
|
||||
let mut cx = VisualTestContext::from_window(*window, cx);
|
||||
|
||||
let context_editor = window
|
||||
.update(&mut cx, |_, window, cx| {
|
||||
cx.new(|cx| {
|
||||
let editor = ContextEditor::for_context(
|
||||
context.clone(),
|
||||
fs,
|
||||
workspace.downgrade(),
|
||||
project,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
editor
|
||||
})
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
(context, context_editor, cx)
|
||||
}
|
||||
|
||||
fn message_range(
|
||||
context: &Entity<AssistantContext>,
|
||||
message_ix: usize,
|
||||
cx: &mut TestAppContext,
|
||||
) -> Range<usize> {
|
||||
context.update(cx, |context, cx| {
|
||||
context
|
||||
.messages(cx)
|
||||
.nth(message_ix)
|
||||
.unwrap()
|
||||
.anchor_range
|
||||
.to_offset(&context.buffer().read(cx).snapshot())
|
||||
})
|
||||
}
|
||||
|
||||
fn assert_copy_paste_context_editor<T: editor::ToOffset>(
|
||||
context_editor: &Entity<ContextEditor>,
|
||||
range: Range<T>,
|
||||
expected_text: &str,
|
||||
cx: &mut VisualTestContext,
|
||||
) {
|
||||
context_editor.update_in(cx, |context_editor, window, cx| {
|
||||
context_editor.editor.update(cx, |editor, cx| {
|
||||
editor.change_selections(None, window, cx, |s| s.select_ranges([range]));
|
||||
});
|
||||
|
||||
context_editor.copy(&Default::default(), window, cx);
|
||||
|
||||
context_editor.editor.update(cx, |editor, cx| {
|
||||
editor.move_to_end(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
context_editor.paste(&Default::default(), window, cx);
|
||||
|
||||
context_editor.editor.update(cx, |editor, cx| {
|
||||
assert_eq!(editor.text(cx), expected_text);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
fn create_context_with_messages(
|
||||
mut messages: Vec<(Role, &str)>,
|
||||
cx: &mut TestAppContext,
|
||||
) -> Entity<AssistantContext> {
|
||||
let registry = Arc::new(LanguageRegistry::test(cx.executor()));
|
||||
let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
|
||||
cx.new(|cx| {
|
||||
let mut context = AssistantContext::local(
|
||||
registry,
|
||||
None,
|
||||
None,
|
||||
prompt_builder.clone(),
|
||||
Arc::new(SlashCommandWorkingSet::default()),
|
||||
cx,
|
||||
);
|
||||
let mut message_1 = context.messages(cx).next().unwrap();
|
||||
let (role, text) = messages.remove(0);
|
||||
|
||||
loop {
|
||||
if role == message_1.role {
|
||||
context.buffer().update(cx, |buffer, cx| {
|
||||
buffer.edit([(message_1.offset_range, text)], None, cx);
|
||||
});
|
||||
break;
|
||||
}
|
||||
let mut ids = HashSet::default();
|
||||
ids.insert(message_1.id);
|
||||
context.cycle_message_roles(ids, cx);
|
||||
message_1 = context.messages(cx).next().unwrap();
|
||||
}
|
||||
|
||||
let mut last_message_id = message_1.id;
|
||||
for (role, text) in messages {
|
||||
context.insert_message_after(last_message_id, role, MessageStatus::Done, cx);
|
||||
let message = context.messages(cx).last().unwrap();
|
||||
last_message_id = message.id;
|
||||
context.buffer().update(cx, |buffer, cx| {
|
||||
buffer.edit([(message.offset_range, text)], None, cx);
|
||||
})
|
||||
}
|
||||
|
||||
context
|
||||
})
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut App) {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
prompt_store::init(cx);
|
||||
|
||||
@@ -347,12 +347,6 @@ impl ContextStore {
|
||||
self.contexts_metadata.iter()
|
||||
}
|
||||
|
||||
pub fn reverse_chronological_contexts(&self) -> Vec<SavedContextMetadata> {
|
||||
let mut contexts = self.contexts_metadata.iter().cloned().collect::<Vec<_>>();
|
||||
contexts.sort_unstable_by_key(|thread| std::cmp::Reverse(thread.mtime));
|
||||
contexts
|
||||
}
|
||||
|
||||
pub fn create(&mut self, cx: &mut Context<Self>) -> Entity<AssistantContext> {
|
||||
let context = cx.new(|cx| {
|
||||
AssistantContext::local(
|
||||
@@ -618,6 +612,16 @@ impl ContextStore {
|
||||
ContextEvent::SummaryChanged => {
|
||||
self.advertise_contexts(cx);
|
||||
}
|
||||
ContextEvent::PathChanged { old_path, new_path } => {
|
||||
if let Some(old_path) = old_path.as_ref() {
|
||||
for metadata in &mut self.contexts_metadata {
|
||||
if &metadata.path == old_path {
|
||||
metadata.path = new_path.clone();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ContextEvent::Operation(operation) => {
|
||||
let context_id = context.read(cx).id().to_proto();
|
||||
let operation = operation.to_proto();
|
||||
@@ -792,7 +796,7 @@ impl ContextStore {
|
||||
.next()
|
||||
{
|
||||
contexts.push(SavedContextMetadata {
|
||||
title: title.to_string(),
|
||||
title: title.to_string().into(),
|
||||
path: path.into(),
|
||||
mtime: metadata.mtime.timestamp_for_user().into(),
|
||||
});
|
||||
@@ -809,74 +813,37 @@ impl ContextStore {
|
||||
}
|
||||
|
||||
fn register_context_server_handlers(&self, cx: &mut Context<Self>) {
|
||||
cx.subscribe(
|
||||
&self.project.read(cx).context_server_store(),
|
||||
Self::handle_context_server_event,
|
||||
)
|
||||
.detach();
|
||||
let context_server_store = self.project.read(cx).context_server_store();
|
||||
cx.subscribe(&context_server_store, Self::handle_context_server_event)
|
||||
.detach();
|
||||
|
||||
// Check for any servers that were already running before the handler was registered
|
||||
for server in context_server_store.read(cx).running_servers() {
|
||||
self.load_context_server_slash_commands(server.id(), context_server_store.clone(), cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_context_server_event(
|
||||
&mut self,
|
||||
context_server_manager: Entity<ContextServerStore>,
|
||||
context_server_store: Entity<ContextServerStore>,
|
||||
event: &project::context_server_store::Event,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let slash_command_working_set = self.slash_commands.clone();
|
||||
match event {
|
||||
project::context_server_store::Event::ServerStatusChanged { server_id, status } => {
|
||||
match status {
|
||||
ContextServerStatus::Running => {
|
||||
if let Some(server) = context_server_manager
|
||||
.read(cx)
|
||||
.get_running_server(server_id)
|
||||
{
|
||||
let context_server_manager = context_server_manager.clone();
|
||||
cx.spawn({
|
||||
let server = server.clone();
|
||||
let server_id = server_id.clone();
|
||||
async move |this, cx| {
|
||||
let Some(protocol) = server.client() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if protocol.capable(context_server::protocol::ServerCapability::Prompts) {
|
||||
if let Some(prompts) = protocol.list_prompts().await.log_err() {
|
||||
let slash_command_ids = prompts
|
||||
.into_iter()
|
||||
.filter(assistant_slash_commands::acceptable_prompt)
|
||||
.map(|prompt| {
|
||||
log::info!(
|
||||
"registering context server command: {:?}",
|
||||
prompt.name
|
||||
);
|
||||
slash_command_working_set.insert(Arc::new(
|
||||
assistant_slash_commands::ContextServerSlashCommand::new(
|
||||
context_server_manager.clone(),
|
||||
server.id(),
|
||||
prompt,
|
||||
),
|
||||
))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
this.update( cx, |this, _cx| {
|
||||
this.context_server_slash_command_ids
|
||||
.insert(server_id.clone(), slash_command_ids);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
self.load_context_server_slash_commands(
|
||||
server_id.clone(),
|
||||
context_server_store.clone(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
ContextServerStatus::Stopped | ContextServerStatus::Error(_) => {
|
||||
if let Some(slash_command_ids) =
|
||||
self.context_server_slash_command_ids.remove(server_id)
|
||||
{
|
||||
slash_command_working_set.remove(&slash_command_ids);
|
||||
self.slash_commands.remove(&slash_command_ids);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@@ -884,4 +851,52 @@ impl ContextStore {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn load_context_server_slash_commands(
|
||||
&self,
|
||||
server_id: ContextServerId,
|
||||
context_server_store: Entity<ContextServerStore>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(server) = context_server_store.read(cx).get_running_server(&server_id) else {
|
||||
return;
|
||||
};
|
||||
let slash_command_working_set = self.slash_commands.clone();
|
||||
cx.spawn(async move |this, cx| {
|
||||
let Some(protocol) = server.client() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if protocol.capable(context_server::protocol::ServerCapability::Prompts) {
|
||||
if let Some(response) = protocol
|
||||
.request::<context_server::types::requests::PromptsList>(())
|
||||
.await
|
||||
.log_err()
|
||||
{
|
||||
let slash_command_ids = response
|
||||
.prompts
|
||||
.into_iter()
|
||||
.filter(assistant_slash_commands::acceptable_prompt)
|
||||
.map(|prompt| {
|
||||
log::info!("registering context server command: {:?}", prompt.name);
|
||||
slash_command_working_set.insert(Arc::new(
|
||||
assistant_slash_commands::ContextServerSlashCommand::new(
|
||||
context_server_store.clone(),
|
||||
server.id(),
|
||||
prompt,
|
||||
),
|
||||
))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
this.update(cx, |this, _cx| {
|
||||
this.context_server_slash_command_ids
|
||||
.insert(server_id.clone(), slash_command_ids);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -682,11 +682,12 @@ mod tests {
|
||||
_: &AsyncApp,
|
||||
) -> BoxFuture<
|
||||
'static,
|
||||
http_client::Result<
|
||||
Result<
|
||||
BoxStream<
|
||||
'static,
|
||||
http_client::Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
|
||||
Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
|
||||
>,
|
||||
LanguageModelCompletionError,
|
||||
>,
|
||||
> {
|
||||
unimplemented!()
|
||||
|
||||
@@ -10,9 +10,7 @@ use parking_lot::Mutex;
|
||||
use project::{CompletionIntent, CompletionSource, lsp_store::CompletionDocumentation};
|
||||
use rope::Point;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
ops::Range,
|
||||
rc::Rc,
|
||||
sync::{
|
||||
Arc,
|
||||
atomic::{AtomicBool, Ordering::SeqCst},
|
||||
@@ -48,7 +46,7 @@ impl SlashCommandCompletionProvider {
|
||||
name_range: Range<Anchor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
) -> Task<Result<Vec<project::CompletionResponse>>> {
|
||||
let slash_commands = self.slash_commands.clone();
|
||||
let candidates = slash_commands
|
||||
.command_names(cx)
|
||||
@@ -71,28 +69,27 @@ impl SlashCommandCompletionProvider {
|
||||
.await;
|
||||
|
||||
cx.update(|_, cx| {
|
||||
Some(
|
||||
matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| {
|
||||
let command = slash_commands.command(&mat.string, cx)?;
|
||||
let mut new_text = mat.string.clone();
|
||||
let requires_argument = command.requires_argument();
|
||||
let accepts_arguments = command.accepts_arguments();
|
||||
if requires_argument || accepts_arguments {
|
||||
new_text.push(' ');
|
||||
}
|
||||
let completions = matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| {
|
||||
let command = slash_commands.command(&mat.string, cx)?;
|
||||
let mut new_text = mat.string.clone();
|
||||
let requires_argument = command.requires_argument();
|
||||
let accepts_arguments = command.accepts_arguments();
|
||||
if requires_argument || accepts_arguments {
|
||||
new_text.push(' ');
|
||||
}
|
||||
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
let command_name = mat.string.clone();
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
let command_name = mat.string.clone();
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
@@ -118,22 +115,27 @@ impl SlashCommandCompletionProvider {
|
||||
}
|
||||
},
|
||||
) as Arc<_>
|
||||
});
|
||||
Some(project::Completion {
|
||||
replace_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
icon_path: None,
|
||||
insert_text_mode: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
})
|
||||
});
|
||||
|
||||
Some(project::Completion {
|
||||
replace_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
icon_path: None,
|
||||
insert_text_mode: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
vec![project::CompletionResponse {
|
||||
completions,
|
||||
is_incomplete: false,
|
||||
}]
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -147,7 +149,7 @@ impl SlashCommandCompletionProvider {
|
||||
last_argument_range: Range<Anchor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
) -> Task<Result<Vec<project::CompletionResponse>>> {
|
||||
let new_cancel_flag = Arc::new(AtomicBool::new(false));
|
||||
let mut flag = self.cancel_flag.lock();
|
||||
flag.store(true, SeqCst);
|
||||
@@ -165,28 +167,27 @@ impl SlashCommandCompletionProvider {
|
||||
let workspace = self.workspace.clone();
|
||||
let arguments = arguments.to_vec();
|
||||
cx.background_spawn(async move {
|
||||
Ok(Some(
|
||||
completions
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|new_argument| {
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
Arc::new({
|
||||
let mut completed_arguments = arguments.clone();
|
||||
if new_argument.replace_previous_arguments {
|
||||
completed_arguments.clear();
|
||||
} else {
|
||||
completed_arguments.pop();
|
||||
}
|
||||
completed_arguments.push(new_argument.new_text.clone());
|
||||
let completions = completions
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|new_argument| {
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
Arc::new({
|
||||
let mut completed_arguments = arguments.clone();
|
||||
if new_argument.replace_previous_arguments {
|
||||
completed_arguments.clear();
|
||||
} else {
|
||||
completed_arguments.pop();
|
||||
}
|
||||
completed_arguments.push(new_argument.new_text.clone());
|
||||
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
move |intent: CompletionIntent,
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
if new_argument.after_completion.run()
|
||||
@@ -210,34 +211,42 @@ impl SlashCommandCompletionProvider {
|
||||
!new_argument.after_completion.run()
|
||||
}
|
||||
}
|
||||
}) as Arc<_>
|
||||
});
|
||||
}) as Arc<_>
|
||||
});
|
||||
|
||||
let mut new_text = new_argument.new_text.clone();
|
||||
if new_argument.after_completion == AfterCompletion::Continue {
|
||||
new_text.push(' ');
|
||||
}
|
||||
let mut new_text = new_argument.new_text.clone();
|
||||
if new_argument.after_completion == AfterCompletion::Continue {
|
||||
new_text.push(' ');
|
||||
}
|
||||
|
||||
project::Completion {
|
||||
replace_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
},
|
||||
label: new_argument.label,
|
||||
icon_path: None,
|
||||
new_text,
|
||||
documentation: None,
|
||||
confirm,
|
||||
insert_text_mode: None,
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
))
|
||||
project::Completion {
|
||||
replace_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
},
|
||||
label: new_argument.label,
|
||||
icon_path: None,
|
||||
new_text,
|
||||
documentation: None,
|
||||
confirm,
|
||||
insert_text_mode: None,
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(vec![project::CompletionResponse {
|
||||
completions,
|
||||
// TODO: Could have slash commands indicate whether their completions are incomplete.
|
||||
is_incomplete: true,
|
||||
}])
|
||||
})
|
||||
} else {
|
||||
Task::ready(Ok(Some(Vec::new())))
|
||||
Task::ready(Ok(vec![project::CompletionResponse {
|
||||
completions: Vec::new(),
|
||||
is_incomplete: true,
|
||||
}]))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -251,7 +260,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
_: editor::CompletionContext,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
) -> Task<Result<Vec<project::CompletionResponse>>> {
|
||||
let Some((name, arguments, command_range, last_argument_range)) =
|
||||
buffer.update(cx, |buffer, _cx| {
|
||||
let position = buffer_position.to_point(buffer);
|
||||
@@ -265,17 +274,17 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
position.row,
|
||||
call.arguments.last().map_or(call.name.end, |arg| arg.end) as u32,
|
||||
);
|
||||
let command_range = buffer.anchor_after(command_range_start)
|
||||
let command_range = buffer.anchor_before(command_range_start)
|
||||
..buffer.anchor_after(command_range_end);
|
||||
|
||||
let name = line[call.name.clone()].to_string();
|
||||
let (arguments, last_argument_range) = if let Some(argument) = call.arguments.last()
|
||||
{
|
||||
let last_arg_start =
|
||||
buffer.anchor_after(Point::new(position.row, argument.start as u32));
|
||||
buffer.anchor_before(Point::new(position.row, argument.start as u32));
|
||||
let first_arg_start = call.arguments.first().expect("we have the last element");
|
||||
let first_arg_start =
|
||||
buffer.anchor_after(Point::new(position.row, first_arg_start.start as u32));
|
||||
let first_arg_start = buffer
|
||||
.anchor_before(Point::new(position.row, first_arg_start.start as u32));
|
||||
let arguments = call
|
||||
.arguments
|
||||
.into_iter()
|
||||
@@ -288,14 +297,17 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
)
|
||||
} else {
|
||||
let start =
|
||||
buffer.anchor_after(Point::new(position.row, call.name.start as u32));
|
||||
buffer.anchor_before(Point::new(position.row, call.name.start as u32));
|
||||
(None, start..buffer_position)
|
||||
};
|
||||
|
||||
Some((name, arguments, command_range, last_argument_range))
|
||||
})
|
||||
else {
|
||||
return Task::ready(Ok(Some(Vec::new())));
|
||||
return Task::ready(Ok(vec![project::CompletionResponse {
|
||||
completions: Vec::new(),
|
||||
is_incomplete: false,
|
||||
}]));
|
||||
};
|
||||
|
||||
if let Some((arguments, argument_range)) = arguments {
|
||||
@@ -313,22 +325,13 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_completions(
|
||||
&self,
|
||||
_: Entity<Buffer>,
|
||||
_: Vec<usize>,
|
||||
_: Rc<RefCell<Box<[project::Completion]>>>,
|
||||
_: &mut Context<Editor>,
|
||||
) -> Task<Result<bool>> {
|
||||
Task::ready(Ok(true))
|
||||
}
|
||||
|
||||
fn is_completion_trigger(
|
||||
&self,
|
||||
buffer: &Entity<Buffer>,
|
||||
position: language::Anchor,
|
||||
_text: &str,
|
||||
_trigger_in_words: bool,
|
||||
_menu_is_open: bool,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> bool {
|
||||
let buffer = buffer.read(cx);
|
||||
|
||||
@@ -86,20 +86,26 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let protocol = server.client().context("Context server not initialized")?;
|
||||
|
||||
let completion_result = protocol
|
||||
.completion(
|
||||
context_server::types::CompletionReference::Prompt(
|
||||
context_server::types::PromptReference {
|
||||
r#type: context_server::types::PromptReferenceType::Prompt,
|
||||
name: prompt_name,
|
||||
let response = protocol
|
||||
.request::<context_server::types::requests::CompletionComplete>(
|
||||
context_server::types::CompletionCompleteParams {
|
||||
reference: context_server::types::CompletionReference::Prompt(
|
||||
context_server::types::PromptReference {
|
||||
ty: context_server::types::PromptReferenceType::Prompt,
|
||||
name: prompt_name,
|
||||
},
|
||||
),
|
||||
argument: context_server::types::CompletionArgument {
|
||||
name: arg_name,
|
||||
value: arg_value,
|
||||
},
|
||||
),
|
||||
arg_name,
|
||||
arg_value,
|
||||
meta: None,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
let completions = completion_result
|
||||
let completions = response
|
||||
.completion
|
||||
.values
|
||||
.into_iter()
|
||||
.map(|value| ArgumentCompletion {
|
||||
@@ -138,10 +144,18 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
if let Some(server) = store.get_running_server(&server_id) {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let protocol = server.client().context("Context server not initialized")?;
|
||||
let result = protocol.run_prompt(&prompt_name, prompt_args).await?;
|
||||
let response = protocol
|
||||
.request::<context_server::types::requests::PromptsGet>(
|
||||
context_server::types::PromptsGetParams {
|
||||
name: prompt_name.clone(),
|
||||
arguments: Some(prompt_args),
|
||||
meta: None,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
anyhow::ensure!(
|
||||
result
|
||||
response
|
||||
.messages
|
||||
.iter()
|
||||
.all(|msg| matches!(msg.role, context_server::types::Role::User)),
|
||||
@@ -149,7 +163,7 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
);
|
||||
|
||||
// Extract text from user messages into a single prompt string
|
||||
let mut prompt = result
|
||||
let mut prompt = response
|
||||
.messages
|
||||
.into_iter()
|
||||
.filter_map(|msg| match msg.content {
|
||||
@@ -167,7 +181,7 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
range: 0..(prompt.len()),
|
||||
icon: IconName::ZedAssistant,
|
||||
label: SharedString::from(
|
||||
result
|
||||
response
|
||||
.description
|
||||
.unwrap_or(format!("Result from {}", prompt_name)),
|
||||
),
|
||||
|
||||
@@ -29,6 +29,7 @@ serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
text.workspace = true
|
||||
util.workspace = true
|
||||
watch.workspace = true
|
||||
workspace.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use buffer_diff::BufferDiff;
|
||||
use collections::BTreeMap;
|
||||
use futures::{StreamExt, channel::mpsc};
|
||||
use futures::{FutureExt, StreamExt, channel::mpsc};
|
||||
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
|
||||
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
|
||||
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
|
||||
@@ -92,21 +92,21 @@ impl ActionLog {
|
||||
let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
|
||||
let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
|
||||
let diff_base;
|
||||
let unreviewed_changes;
|
||||
let unreviewed_edits;
|
||||
if is_created {
|
||||
diff_base = Rope::default();
|
||||
unreviewed_changes = Patch::new(vec![Edit {
|
||||
unreviewed_edits = Patch::new(vec![Edit {
|
||||
old: 0..1,
|
||||
new: 0..text_snapshot.max_point().row + 1,
|
||||
}])
|
||||
} else {
|
||||
diff_base = buffer.read(cx).as_rope().clone();
|
||||
unreviewed_changes = Patch::default();
|
||||
unreviewed_edits = Patch::default();
|
||||
}
|
||||
TrackedBuffer {
|
||||
buffer: buffer.clone(),
|
||||
diff_base,
|
||||
unreviewed_changes,
|
||||
unreviewed_edits,
|
||||
snapshot: text_snapshot.clone(),
|
||||
status,
|
||||
version: buffer.read(cx).version(),
|
||||
@@ -175,7 +175,7 @@ impl ActionLog {
|
||||
.map_or(false, |file| file.disk_state() != DiskState::Deleted)
|
||||
{
|
||||
// If the buffer had been deleted by a tool, but it got
|
||||
// resurrected externally, we want to clear the changes we
|
||||
// resurrected externally, we want to clear the edits we
|
||||
// were tracking and reset the buffer's state.
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
self.track_buffer_internal(buffer, false, cx);
|
||||
@@ -188,108 +188,274 @@ impl ActionLog {
|
||||
async fn maintain_diff(
|
||||
this: WeakEntity<Self>,
|
||||
buffer: Entity<Buffer>,
|
||||
mut diff_update: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
|
||||
mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<()> {
|
||||
while let Some((author, buffer_snapshot)) = diff_update.next().await {
|
||||
let (rebase, diff, language, language_registry) =
|
||||
this.read_with(cx, |this, cx| {
|
||||
let tracked_buffer = this
|
||||
.tracked_buffers
|
||||
.get(&buffer)
|
||||
.context("buffer not tracked")?;
|
||||
let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
|
||||
let git_diff = this
|
||||
.update(cx, |this, cx| {
|
||||
this.project.update(cx, |project, cx| {
|
||||
project.open_uncommitted_diff(buffer.clone(), cx)
|
||||
})
|
||||
})?
|
||||
.await
|
||||
.ok();
|
||||
let buffer_repo = git_store.read_with(cx, |git_store, cx| {
|
||||
git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
|
||||
})?;
|
||||
|
||||
let rebase = cx.background_spawn({
|
||||
let mut base_text = tracked_buffer.diff_base.clone();
|
||||
let old_snapshot = tracked_buffer.snapshot.clone();
|
||||
let new_snapshot = buffer_snapshot.clone();
|
||||
let unreviewed_changes = tracked_buffer.unreviewed_changes.clone();
|
||||
async move {
|
||||
let edits = diff_snapshots(&old_snapshot, &new_snapshot);
|
||||
if let ChangeAuthor::User = author {
|
||||
apply_non_conflicting_edits(
|
||||
&unreviewed_changes,
|
||||
edits,
|
||||
&mut base_text,
|
||||
new_snapshot.as_rope(),
|
||||
);
|
||||
let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
|
||||
let _repo_subscription =
|
||||
if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
|
||||
cx.update(|cx| {
|
||||
let mut old_head = buffer_repo.read(cx).head_commit.clone();
|
||||
Some(cx.subscribe(git_diff, move |_, event, cx| match event {
|
||||
buffer_diff::BufferDiffEvent::DiffChanged { .. } => {
|
||||
let new_head = buffer_repo.read(cx).head_commit.clone();
|
||||
if new_head != old_head {
|
||||
old_head = new_head;
|
||||
git_diff_updates_tx.send(()).ok();
|
||||
}
|
||||
(Arc::new(base_text.to_string()), base_text)
|
||||
}
|
||||
});
|
||||
_ => {}
|
||||
}))
|
||||
})?
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
anyhow::Ok((
|
||||
rebase,
|
||||
tracked_buffer.diff.clone(),
|
||||
tracked_buffer.buffer.read(cx).language().cloned(),
|
||||
tracked_buffer.buffer.read(cx).language_registry(),
|
||||
))
|
||||
})??;
|
||||
|
||||
let (new_base_text, new_diff_base) = rebase.await;
|
||||
let diff_snapshot = BufferDiff::update_diff(
|
||||
diff.clone(),
|
||||
buffer_snapshot.clone(),
|
||||
Some(new_base_text),
|
||||
true,
|
||||
false,
|
||||
language,
|
||||
language_registry,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
let mut unreviewed_changes = Patch::default();
|
||||
if let Ok(diff_snapshot) = diff_snapshot {
|
||||
unreviewed_changes = cx
|
||||
.background_spawn({
|
||||
let diff_snapshot = diff_snapshot.clone();
|
||||
let buffer_snapshot = buffer_snapshot.clone();
|
||||
let new_diff_base = new_diff_base.clone();
|
||||
async move {
|
||||
let mut unreviewed_changes = Patch::default();
|
||||
for hunk in diff_snapshot.hunks_intersecting_range(
|
||||
Anchor::MIN..Anchor::MAX,
|
||||
&buffer_snapshot,
|
||||
) {
|
||||
let old_range = new_diff_base
|
||||
.offset_to_point(hunk.diff_base_byte_range.start)
|
||||
..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
|
||||
let new_range = hunk.range.start..hunk.range.end;
|
||||
unreviewed_changes.push(point_to_row_edit(
|
||||
Edit {
|
||||
old: old_range,
|
||||
new: new_range,
|
||||
},
|
||||
&new_diff_base,
|
||||
&buffer_snapshot.as_rope(),
|
||||
));
|
||||
}
|
||||
unreviewed_changes
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
diff.update(cx, |diff, cx| {
|
||||
diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx)
|
||||
})?;
|
||||
loop {
|
||||
futures::select_biased! {
|
||||
buffer_update = buffer_updates.next() => {
|
||||
if let Some((author, buffer_snapshot)) = buffer_update {
|
||||
Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
_ = git_diff_updates_rx.changed().fuse() => {
|
||||
if let Some(git_diff) = git_diff.as_ref() {
|
||||
Self::keep_committed_edits(&this, &buffer, &git_diff, cx).await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
this.update(cx, |this, cx| {
|
||||
let tracked_buffer = this
|
||||
.tracked_buffers
|
||||
.get_mut(&buffer)
|
||||
.context("buffer not tracked")?;
|
||||
tracked_buffer.diff_base = new_diff_base;
|
||||
tracked_buffer.snapshot = buffer_snapshot;
|
||||
tracked_buffer.unreviewed_changes = unreviewed_changes;
|
||||
cx.notify();
|
||||
anyhow::Ok(())
|
||||
})??;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn track_edits(
|
||||
this: &WeakEntity<ActionLog>,
|
||||
buffer: &Entity<Buffer>,
|
||||
author: ChangeAuthor,
|
||||
buffer_snapshot: text::BufferSnapshot,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<()> {
|
||||
let rebase = this.read_with(cx, |this, cx| {
|
||||
let tracked_buffer = this
|
||||
.tracked_buffers
|
||||
.get(buffer)
|
||||
.context("buffer not tracked")?;
|
||||
|
||||
let rebase = cx.background_spawn({
|
||||
let mut base_text = tracked_buffer.diff_base.clone();
|
||||
let old_snapshot = tracked_buffer.snapshot.clone();
|
||||
let new_snapshot = buffer_snapshot.clone();
|
||||
let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
|
||||
async move {
|
||||
let edits = diff_snapshots(&old_snapshot, &new_snapshot);
|
||||
if let ChangeAuthor::User = author {
|
||||
apply_non_conflicting_edits(
|
||||
&unreviewed_edits,
|
||||
edits,
|
||||
&mut base_text,
|
||||
new_snapshot.as_rope(),
|
||||
);
|
||||
}
|
||||
(Arc::new(base_text.to_string()), base_text)
|
||||
}
|
||||
});
|
||||
|
||||
anyhow::Ok(rebase)
|
||||
})??;
|
||||
let (new_base_text, new_diff_base) = rebase.await;
|
||||
Self::update_diff(
|
||||
this,
|
||||
buffer,
|
||||
buffer_snapshot,
|
||||
new_base_text,
|
||||
new_diff_base,
|
||||
cx,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn keep_committed_edits(
|
||||
this: &WeakEntity<ActionLog>,
|
||||
buffer: &Entity<Buffer>,
|
||||
git_diff: &Entity<BufferDiff>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<()> {
|
||||
let buffer_snapshot = this.read_with(cx, |this, _cx| {
|
||||
let tracked_buffer = this
|
||||
.tracked_buffers
|
||||
.get(buffer)
|
||||
.context("buffer not tracked")?;
|
||||
anyhow::Ok(tracked_buffer.snapshot.clone())
|
||||
})??;
|
||||
let (new_base_text, new_diff_base) = this
|
||||
.read_with(cx, |this, cx| {
|
||||
let tracked_buffer = this
|
||||
.tracked_buffers
|
||||
.get(buffer)
|
||||
.context("buffer not tracked")?;
|
||||
let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
|
||||
let agent_diff_base = tracked_buffer.diff_base.clone();
|
||||
let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
|
||||
let buffer_text = tracked_buffer.snapshot.as_rope().clone();
|
||||
anyhow::Ok(cx.background_spawn(async move {
|
||||
let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
|
||||
let committed_edits = language::line_diff(
|
||||
&agent_diff_base.to_string(),
|
||||
&git_diff_base.to_string(),
|
||||
)
|
||||
.into_iter()
|
||||
.map(|(old, new)| Edit { old, new });
|
||||
|
||||
let mut new_agent_diff_base = agent_diff_base.clone();
|
||||
let mut row_delta = 0i32;
|
||||
for committed in committed_edits {
|
||||
while let Some(unreviewed) = old_unreviewed_edits.peek() {
|
||||
// If the committed edit matches the unreviewed
|
||||
// edit, assume the user wants to keep it.
|
||||
if committed.old == unreviewed.old {
|
||||
let unreviewed_new =
|
||||
buffer_text.slice_rows(unreviewed.new.clone()).to_string();
|
||||
let committed_new =
|
||||
git_diff_base.slice_rows(committed.new.clone()).to_string();
|
||||
if unreviewed_new == committed_new {
|
||||
let old_byte_start =
|
||||
new_agent_diff_base.point_to_offset(Point::new(
|
||||
(unreviewed.old.start as i32 + row_delta) as u32,
|
||||
0,
|
||||
));
|
||||
let old_byte_end =
|
||||
new_agent_diff_base.point_to_offset(cmp::min(
|
||||
Point::new(
|
||||
(unreviewed.old.end as i32 + row_delta) as u32,
|
||||
0,
|
||||
),
|
||||
new_agent_diff_base.max_point(),
|
||||
));
|
||||
new_agent_diff_base
|
||||
.replace(old_byte_start..old_byte_end, &unreviewed_new);
|
||||
row_delta +=
|
||||
unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
|
||||
}
|
||||
} else if unreviewed.old.start >= committed.old.end {
|
||||
break;
|
||||
}
|
||||
|
||||
old_unreviewed_edits.next().unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
(
|
||||
Arc::new(new_agent_diff_base.to_string()),
|
||||
new_agent_diff_base,
|
||||
)
|
||||
}))
|
||||
})??
|
||||
.await;
|
||||
|
||||
Self::update_diff(
|
||||
this,
|
||||
buffer,
|
||||
buffer_snapshot,
|
||||
new_base_text,
|
||||
new_diff_base,
|
||||
cx,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn update_diff(
|
||||
this: &WeakEntity<ActionLog>,
|
||||
buffer: &Entity<Buffer>,
|
||||
buffer_snapshot: text::BufferSnapshot,
|
||||
new_base_text: Arc<String>,
|
||||
new_diff_base: Rope,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<()> {
|
||||
let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
|
||||
let tracked_buffer = this
|
||||
.tracked_buffers
|
||||
.get(buffer)
|
||||
.context("buffer not tracked")?;
|
||||
anyhow::Ok((
|
||||
tracked_buffer.diff.clone(),
|
||||
buffer.read(cx).language().cloned(),
|
||||
buffer.read(cx).language_registry().clone(),
|
||||
))
|
||||
})??;
|
||||
let diff_snapshot = BufferDiff::update_diff(
|
||||
diff.clone(),
|
||||
buffer_snapshot.clone(),
|
||||
Some(new_base_text),
|
||||
true,
|
||||
false,
|
||||
language,
|
||||
language_registry,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
let mut unreviewed_edits = Patch::default();
|
||||
if let Ok(diff_snapshot) = diff_snapshot {
|
||||
unreviewed_edits = cx
|
||||
.background_spawn({
|
||||
let diff_snapshot = diff_snapshot.clone();
|
||||
let buffer_snapshot = buffer_snapshot.clone();
|
||||
let new_diff_base = new_diff_base.clone();
|
||||
async move {
|
||||
let mut unreviewed_edits = Patch::default();
|
||||
for hunk in diff_snapshot
|
||||
.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
|
||||
{
|
||||
let old_range = new_diff_base
|
||||
.offset_to_point(hunk.diff_base_byte_range.start)
|
||||
..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
|
||||
let new_range = hunk.range.start..hunk.range.end;
|
||||
unreviewed_edits.push(point_to_row_edit(
|
||||
Edit {
|
||||
old: old_range,
|
||||
new: new_range,
|
||||
},
|
||||
&new_diff_base,
|
||||
&buffer_snapshot.as_rope(),
|
||||
));
|
||||
}
|
||||
unreviewed_edits
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
diff.update(cx, |diff, cx| {
|
||||
diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
|
||||
})?;
|
||||
}
|
||||
this.update(cx, |this, cx| {
|
||||
let tracked_buffer = this
|
||||
.tracked_buffers
|
||||
.get_mut(buffer)
|
||||
.context("buffer not tracked")?;
|
||||
tracked_buffer.diff_base = new_diff_base;
|
||||
tracked_buffer.snapshot = buffer_snapshot;
|
||||
tracked_buffer.unreviewed_edits = unreviewed_edits;
|
||||
cx.notify();
|
||||
anyhow::Ok(())
|
||||
})?
|
||||
}
|
||||
|
||||
/// Track a buffer as read, so we can notify the model about user edits.
|
||||
pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
|
||||
self.track_buffer_internal(buffer, false, cx);
|
||||
@@ -350,7 +516,7 @@ impl ActionLog {
|
||||
buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
|
||||
let mut delta = 0i32;
|
||||
|
||||
tracked_buffer.unreviewed_changes.retain_mut(|edit| {
|
||||
tracked_buffer.unreviewed_edits.retain_mut(|edit| {
|
||||
edit.old.start = (edit.old.start as i32 + delta) as u32;
|
||||
edit.old.end = (edit.old.end as i32 + delta) as u32;
|
||||
|
||||
@@ -461,7 +627,7 @@ impl ActionLog {
|
||||
.project
|
||||
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
|
||||
|
||||
// Clear all tracked changes for this buffer and start over as if we just read it.
|
||||
// Clear all tracked edits for this buffer and start over as if we just read it.
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
self.buffer_read(buffer.clone(), cx);
|
||||
cx.notify();
|
||||
@@ -477,7 +643,7 @@ impl ActionLog {
|
||||
.peekable();
|
||||
|
||||
let mut edits_to_revert = Vec::new();
|
||||
for edit in tracked_buffer.unreviewed_changes.edits() {
|
||||
for edit in tracked_buffer.unreviewed_edits.edits() {
|
||||
let new_range = tracked_buffer
|
||||
.snapshot
|
||||
.anchor_before(Point::new(edit.new.start, 0))
|
||||
@@ -529,7 +695,7 @@ impl ActionLog {
|
||||
.retain(|_buffer, tracked_buffer| match tracked_buffer.status {
|
||||
TrackedBufferStatus::Deleted => false,
|
||||
_ => {
|
||||
tracked_buffer.unreviewed_changes.clear();
|
||||
tracked_buffer.unreviewed_edits.clear();
|
||||
tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
|
||||
tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
|
||||
true
|
||||
@@ -538,11 +704,11 @@ impl ActionLog {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
/// Returns the set of buffers that contain changes that haven't been reviewed by the user.
|
||||
/// Returns the set of buffers that contain edits that haven't been reviewed by the user.
|
||||
pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
|
||||
self.tracked_buffers
|
||||
.iter()
|
||||
.filter(|(_, tracked)| tracked.has_changes(cx))
|
||||
.filter(|(_, tracked)| tracked.has_edits(cx))
|
||||
.map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
|
||||
.collect()
|
||||
}
|
||||
@@ -662,11 +828,7 @@ fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edi
|
||||
old: edit.old.start.row + 1..edit.old.end.row + 1,
|
||||
new: edit.new.start.row + 1..edit.new.end.row + 1,
|
||||
}
|
||||
} else if edit.old.start.column == 0
|
||||
&& edit.old.end.column == 0
|
||||
&& edit.new.end.column == 0
|
||||
&& edit.old.end != old_text.max_point()
|
||||
{
|
||||
} else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
|
||||
Edit {
|
||||
old: edit.old.start.row..edit.old.end.row,
|
||||
new: edit.new.start.row..edit.new.end.row,
|
||||
@@ -694,7 +856,7 @@ enum TrackedBufferStatus {
|
||||
struct TrackedBuffer {
|
||||
buffer: Entity<Buffer>,
|
||||
diff_base: Rope,
|
||||
unreviewed_changes: Patch<u32>,
|
||||
unreviewed_edits: Patch<u32>,
|
||||
status: TrackedBufferStatus,
|
||||
version: clock::Global,
|
||||
diff: Entity<BufferDiff>,
|
||||
@@ -706,7 +868,7 @@ struct TrackedBuffer {
|
||||
}
|
||||
|
||||
impl TrackedBuffer {
|
||||
fn has_changes(&self, cx: &App) -> bool {
|
||||
fn has_edits(&self, cx: &App) -> bool {
|
||||
self.diff
|
||||
.read(cx)
|
||||
.hunks(&self.buffer.read(cx), cx)
|
||||
@@ -727,8 +889,6 @@ pub struct ChangedBuffer {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::env;
|
||||
|
||||
use super::*;
|
||||
use buffer_diff::DiffHunkStatusKind;
|
||||
use gpui::TestAppContext;
|
||||
@@ -737,6 +897,7 @@ mod tests {
|
||||
use rand::prelude::*;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::env;
|
||||
use util::{RandomCharIter, path};
|
||||
|
||||
#[ctor::ctor]
|
||||
@@ -1751,15 +1912,15 @@ mod tests {
|
||||
.unwrap();
|
||||
}
|
||||
_ => {
|
||||
let is_agent_change = rng.gen_bool(0.5);
|
||||
if is_agent_change {
|
||||
let is_agent_edit = rng.gen_bool(0.5);
|
||||
if is_agent_edit {
|
||||
log::info!("agent edit");
|
||||
} else {
|
||||
log::info!("user edit");
|
||||
}
|
||||
cx.update(|cx| {
|
||||
buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
|
||||
if is_agent_change {
|
||||
if is_agent_edit {
|
||||
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
|
||||
}
|
||||
});
|
||||
@@ -1784,7 +1945,7 @@ mod tests {
|
||||
let tracked_buffer = log.tracked_buffers.get(&buffer).unwrap();
|
||||
let mut old_text = tracked_buffer.diff_base.clone();
|
||||
let new_text = buffer.read(cx).as_rope();
|
||||
for edit in tracked_buffer.unreviewed_changes.edits() {
|
||||
for edit in tracked_buffer.unreviewed_edits.edits() {
|
||||
let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
|
||||
let old_end = old_text.point_to_offset(cmp::min(
|
||||
Point::new(edit.new.start + edit.old_len(), 0),
|
||||
@@ -1800,6 +1961,171 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
fs.insert_tree(
|
||||
path!("/project"),
|
||||
json!({
|
||||
".git": {},
|
||||
"file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
fs.set_head_for_repo(
|
||||
path!("/project/.git").as_ref(),
|
||||
&[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
|
||||
"0000000",
|
||||
);
|
||||
cx.run_until_parked();
|
||||
|
||||
let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
|
||||
let file_path = project
|
||||
.read_with(cx, |project, cx| {
|
||||
project.find_project_path(path!("/project/file.txt"), cx)
|
||||
})
|
||||
.unwrap();
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(file_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.update(|cx| {
|
||||
action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
[
|
||||
// Edit at the very start: a -> A
|
||||
(Point::new(0, 0)..Point::new(0, 1), "A"),
|
||||
// Deletion in the middle: remove lines d and e
|
||||
(Point::new(3, 0)..Point::new(5, 0), ""),
|
||||
// Modification: g -> GGG
|
||||
(Point::new(6, 0)..Point::new(6, 1), "GGG"),
|
||||
// Addition: insert new line after h
|
||||
(Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
|
||||
// Edit the very last character: j -> J
|
||||
(Point::new(9, 0)..Point::new(9, 1), "J"),
|
||||
],
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
unreviewed_hunks(&action_log, cx),
|
||||
vec![(
|
||||
buffer.clone(),
|
||||
vec![
|
||||
HunkStatus {
|
||||
range: Point::new(0, 0)..Point::new(1, 0),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "a\n".into()
|
||||
},
|
||||
HunkStatus {
|
||||
range: Point::new(3, 0)..Point::new(3, 0),
|
||||
diff_status: DiffHunkStatusKind::Deleted,
|
||||
old_text: "d\ne\n".into()
|
||||
},
|
||||
HunkStatus {
|
||||
range: Point::new(4, 0)..Point::new(5, 0),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "g\n".into()
|
||||
},
|
||||
HunkStatus {
|
||||
range: Point::new(6, 0)..Point::new(7, 0),
|
||||
diff_status: DiffHunkStatusKind::Added,
|
||||
old_text: "".into()
|
||||
},
|
||||
HunkStatus {
|
||||
range: Point::new(8, 0)..Point::new(8, 1),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "j".into()
|
||||
}
|
||||
]
|
||||
)]
|
||||
);
|
||||
|
||||
// Simulate a git commit that matches some edits but not others:
|
||||
// - Accepts the first edit (a -> A)
|
||||
// - Accepts the deletion (remove d and e)
|
||||
// - Makes a different change to g (g -> G instead of GGG)
|
||||
// - Ignores the NEW line addition
|
||||
// - Ignores the last line edit (j stays as j)
|
||||
fs.set_head_for_repo(
|
||||
path!("/project/.git").as_ref(),
|
||||
&[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
|
||||
"0000001",
|
||||
);
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
unreviewed_hunks(&action_log, cx),
|
||||
vec![(
|
||||
buffer.clone(),
|
||||
vec![
|
||||
HunkStatus {
|
||||
range: Point::new(4, 0)..Point::new(5, 0),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "g\n".into()
|
||||
},
|
||||
HunkStatus {
|
||||
range: Point::new(6, 0)..Point::new(7, 0),
|
||||
diff_status: DiffHunkStatusKind::Added,
|
||||
old_text: "".into()
|
||||
},
|
||||
HunkStatus {
|
||||
range: Point::new(8, 0)..Point::new(8, 1),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "j".into()
|
||||
}
|
||||
]
|
||||
)]
|
||||
);
|
||||
|
||||
// Make another commit that accepts the NEW line but with different content
|
||||
fs.set_head_for_repo(
|
||||
path!("/project/.git").as_ref(),
|
||||
&[(
|
||||
"file.txt".into(),
|
||||
"A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
|
||||
)],
|
||||
"0000002",
|
||||
);
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
unreviewed_hunks(&action_log, cx),
|
||||
vec![(
|
||||
buffer.clone(),
|
||||
vec![
|
||||
HunkStatus {
|
||||
range: Point::new(6, 0)..Point::new(7, 0),
|
||||
diff_status: DiffHunkStatusKind::Added,
|
||||
old_text: "".into()
|
||||
},
|
||||
HunkStatus {
|
||||
range: Point::new(8, 0)..Point::new(8, 1),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "j".into()
|
||||
}
|
||||
]
|
||||
)]
|
||||
);
|
||||
|
||||
// Final commit that accepts all remaining edits
|
||||
fs.set_head_for_repo(
|
||||
path!("/project/.git").as_ref(),
|
||||
&[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
|
||||
"0000003",
|
||||
);
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
struct HunkStatus {
|
||||
range: Range<Point>,
|
||||
|
||||
@@ -214,10 +214,13 @@ pub trait Tool: 'static + Send + Sync {
ToolSource::Native
}

/// Returns true iff the tool needs the users's confirmation
/// Returns true if the tool needs the user's confirmation
/// before having permission to run.
fn needs_confirmation(&self, input: &serde_json::Value, cx: &App) -> bool;

/// Returns true if the tool may perform edits.
fn may_perform_edits(&self) -> bool;

/// Returns the JSON schema that describes the tool's input.
fn input_schema(&self, _: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
Ok(serde_json::Value::Object(serde_json::Map::default()))

@@ -16,11 +16,24 @@ pub fn adapt_schema_to_format(
}

match format {
LanguageModelToolSchemaFormat::JsonSchema => Ok(()),
LanguageModelToolSchemaFormat::JsonSchema => preprocess_json_schema(json),
LanguageModelToolSchemaFormat::JsonSchemaSubset => adapt_to_json_schema_subset(json),
}
}

fn preprocess_json_schema(json: &mut Value) -> Result<()> {
// `additionalProperties` defaults to `false` unless explicitly specified.
// This prevents models from hallucinating tool parameters.
if let Value::Object(obj) = json {
if let Some(Value::String(type_str)) = obj.get("type") {
if type_str == "object" && !obj.contains_key("additionalProperties") {
obj.insert("additionalProperties".to_string(), Value::Bool(false));
}
}
}
Ok(())
}

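For reference, a minimal standalone sketch of what this preprocessing step does to a tool's input schema (assuming only `serde_json`; the helper name below is hypothetical and not part of this crate): object schemas are closed with `additionalProperties: false` unless the author already set a value.

```rust
// Hedged sketch: mirrors the preprocessing above rather than calling the
// crate's private `preprocess_json_schema`.
use serde_json::{Value, json};

fn ensure_closed_object_schema(schema: &mut Value) {
    if let Value::Object(obj) = schema {
        if obj.get("type").and_then(Value::as_str) == Some("object")
            && !obj.contains_key("additionalProperties")
        {
            // Closed by default, so models cannot invent undeclared parameters.
            obj.insert("additionalProperties".to_string(), Value::Bool(false));
        }
    }
}

fn main() {
    let mut schema = json!({
        "type": "object",
        "properties": { "path": { "type": "string" } }
    });
    ensure_closed_object_schema(&mut schema);
    assert_eq!(schema["additionalProperties"], Value::Bool(false));

    // An explicit setting is left untouched.
    let mut open = json!({ "type": "object", "additionalProperties": true });
    ensure_closed_object_schema(&mut open);
    assert_eq!(open["additionalProperties"], Value::Bool(true));
}
```
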
/// Tries to adapt the json schema so that it is compatible with https://ai.google.dev/api/caching#Schema
|
||||
fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
|
||||
if let Value::Object(obj) = json {
|
||||
@@ -33,15 +46,19 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
|
||||
);
|
||||
}
|
||||
|
||||
const KEYS_TO_REMOVE: [&str; 5] = [
|
||||
"format",
|
||||
"additionalProperties",
|
||||
"exclusiveMinimum",
|
||||
"exclusiveMaximum",
|
||||
"optional",
|
||||
const KEYS_TO_REMOVE: [(&str, fn(&Value) -> bool); 5] = [
|
||||
("format", |value| value.is_string()),
|
||||
("additionalProperties", |value| value.is_boolean()),
|
||||
("exclusiveMinimum", |value| value.is_number()),
|
||||
("exclusiveMaximum", |value| value.is_number()),
|
||||
("optional", |value| value.is_boolean()),
|
||||
];
|
||||
for key in KEYS_TO_REMOVE {
|
||||
obj.remove(key);
|
||||
for (key, predicate) in KEYS_TO_REMOVE {
|
||||
if let Some(value) = obj.get(key) {
|
||||
if predicate(value) {
|
||||
obj.remove(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If a type is not specified for an input parameter, add a default type
|
||||
@@ -140,6 +157,24 @@ mod tests {
"type": "integer"
})
);

// Ensure that we do not remove keys that are actually supported (e.g. "format" can just be used as another property)
let mut json = json!({
"description": "A test field",
"type": "integer",
"format": {},
});

adapt_to_json_schema_subset(&mut json).unwrap();

assert_eq!(
json,
json!({
"description": "A test field",
"type": "integer",
"format": {},
})
);
}
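The test above covers the case where an object-valued `format` is preserved; a complementary sketch (again assuming only `serde_json`, with a hypothetical helper mirroring the predicate in the typed `KEYS_TO_REMOVE` table) shows the case where a string-valued `format` is stripped for the schema subset:

```rust
// Hedged, standalone sketch; it does not call the crate's private function.
use serde_json::{Value, json};

fn strip_string_format(schema: &mut Value) {
    if let Value::Object(obj) = schema {
        // A string-valued "format" is the JSON Schema keyword the subset rejects;
        // any other value is an ordinary user-defined property and is kept.
        if obj.get("format").map_or(false, Value::is_string) {
            obj.remove("format");
        }
    }
}

fn main() {
    let mut keyword = json!({ "type": "string", "format": "uri" });
    strip_string_format(&mut keyword);
    assert_eq!(keyword, json!({ "type": "string" }));

    let mut property = json!({ "type": "integer", "format": {} });
    strip_string_format(&mut property);
    assert_eq!(property, json!({ "type": "integer", "format": {} }));
}
```
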
|
||||
|
||||
#[test]
|
||||
@@ -237,4 +272,59 @@ mod tests {
|
||||
|
||||
assert!(adapt_to_json_schema_subset(&mut json).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_preprocess_json_schema_adds_additional_properties() {
|
||||
let mut json = json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
preprocess_json_schema(&mut json).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
json,
|
||||
json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_preprocess_json_schema_preserves_additional_properties() {
|
||||
let mut json = json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"additionalProperties": true
|
||||
});
|
||||
|
||||
preprocess_json_schema(&mut json).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
json,
|
||||
json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"additionalProperties": true
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use collections::{HashMap, HashSet, IndexMap};
|
||||
use gpui::{App, Context, EventEmitter};
|
||||
use collections::{HashMap, IndexMap};
|
||||
use gpui::App;
|
||||
|
||||
use crate::{Tool, ToolRegistry, ToolSource};
|
||||
|
||||
@@ -13,17 +13,9 @@ pub struct ToolId(usize);
|
||||
pub struct ToolWorkingSet {
|
||||
context_server_tools_by_id: HashMap<ToolId, Arc<dyn Tool>>,
|
||||
context_server_tools_by_name: HashMap<String, Arc<dyn Tool>>,
|
||||
enabled_sources: HashSet<ToolSource>,
|
||||
enabled_tools_by_source: HashMap<ToolSource, HashSet<Arc<str>>>,
|
||||
next_tool_id: ToolId,
|
||||
}
|
||||
|
||||
pub enum ToolWorkingSetEvent {
|
||||
EnabledToolsChanged,
|
||||
}
|
||||
|
||||
impl EventEmitter<ToolWorkingSetEvent> for ToolWorkingSet {}
|
||||
|
||||
impl ToolWorkingSet {
|
||||
pub fn tool(&self, name: &str, cx: &App) -> Option<Arc<dyn Tool>> {
|
||||
self.context_server_tools_by_name
|
||||
@@ -57,42 +49,6 @@ impl ToolWorkingSet {
|
||||
tools_by_source
|
||||
}
|
||||
|
||||
pub fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
|
||||
let all_tools = self.tools(cx);
|
||||
|
||||
all_tools
|
||||
.into_iter()
|
||||
.filter(|tool| self.is_enabled(&tool.source(), &tool.name().into()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn disable_all_tools(&mut self, cx: &mut Context<Self>) {
|
||||
self.enabled_tools_by_source.clear();
|
||||
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
|
||||
}
|
||||
|
||||
pub fn enable_source(&mut self, source: ToolSource, cx: &mut Context<Self>) {
|
||||
self.enabled_sources.insert(source.clone());
|
||||
|
||||
let tools_by_source = self.tools_by_source(cx);
|
||||
if let Some(tools) = tools_by_source.get(&source) {
|
||||
self.enabled_tools_by_source.insert(
|
||||
source,
|
||||
tools
|
||||
.into_iter()
|
||||
.map(|tool| tool.name().into())
|
||||
.collect::<HashSet<_>>(),
|
||||
);
|
||||
}
|
||||
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
|
||||
}
|
||||
|
||||
pub fn disable_source(&mut self, source: &ToolSource, cx: &mut Context<Self>) {
|
||||
self.enabled_sources.remove(source);
|
||||
self.enabled_tools_by_source.remove(source);
|
||||
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, tool: Arc<dyn Tool>) -> ToolId {
|
||||
let tool_id = self.next_tool_id;
|
||||
self.next_tool_id.0 += 1;
|
||||
@@ -102,42 +58,6 @@ impl ToolWorkingSet {
|
||||
tool_id
|
||||
}
|
||||
|
||||
pub fn is_enabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
|
||||
self.enabled_tools_by_source
|
||||
.get(source)
|
||||
.map_or(false, |enabled_tools| enabled_tools.contains(name))
|
||||
}
|
||||
|
||||
pub fn is_disabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
|
||||
!self.is_enabled(source, name)
|
||||
}
|
||||
|
||||
pub fn enable(
|
||||
&mut self,
|
||||
source: ToolSource,
|
||||
tools_to_enable: &[Arc<str>],
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.enabled_tools_by_source
|
||||
.entry(source)
|
||||
.or_default()
|
||||
.extend(tools_to_enable.into_iter().cloned());
|
||||
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
|
||||
}
|
||||
|
||||
pub fn disable(
|
||||
&mut self,
|
||||
source: ToolSource,
|
||||
tools_to_disable: &[Arc<str>],
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.enabled_tools_by_source
|
||||
.entry(source)
|
||||
.or_default()
|
||||
.retain(|name| !tools_to_disable.contains(name));
|
||||
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, tool_ids_to_remove: &[ToolId]) {
|
||||
self.context_server_tools_by_id
|
||||
.retain(|id, _| !tool_ids_to_remove.contains(id));
|
||||
|
||||
@@ -18,7 +18,6 @@ eval = []
|
||||
agent_settings.workspace = true
|
||||
anyhow.workspace = true
|
||||
assistant_tool.workspace = true
|
||||
async-watch.workspace = true
|
||||
buffer_diff.workspace = true
|
||||
chrono.workspace = true
|
||||
collections.workspace = true
|
||||
@@ -58,6 +57,7 @@ terminal_view.workspace = true
|
||||
theme.workspace = true
|
||||
ui.workspace = true
|
||||
util.workspace = true
|
||||
watch.workspace = true
|
||||
web_search.workspace = true
|
||||
which.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
@@ -80,6 +80,7 @@ rand.workspace = true
|
||||
pretty_assertions.workspace = true
|
||||
reqwest_client.workspace = true
|
||||
settings = { workspace = true, features = ["test-support"] }
|
||||
smol.workspace = true
|
||||
task = { workspace = true, features = ["test-support"]}
|
||||
tempfile.workspace = true
|
||||
theme.workspace = true
|
||||
|
||||
@@ -37,13 +37,13 @@ use crate::diagnostics_tool::DiagnosticsTool;
|
||||
use crate::edit_file_tool::EditFileTool;
|
||||
use crate::fetch_tool::FetchTool;
|
||||
use crate::find_path_tool::FindPathTool;
|
||||
use crate::grep_tool::GrepTool;
|
||||
use crate::list_directory_tool::ListDirectoryTool;
|
||||
use crate::now_tool::NowTool;
|
||||
use crate::thinking_tool::ThinkingTool;
|
||||
|
||||
pub use edit_file_tool::{EditFileMode, EditFileToolInput};
|
||||
pub use find_path_tool::FindPathToolInput;
|
||||
pub use grep_tool::{GrepTool, GrepToolInput};
|
||||
pub use open_tool::OpenTool;
|
||||
pub use read_file_tool::{ReadFileTool, ReadFileToolInput};
|
||||
pub use terminal_tool::TerminalTool;
|
||||
@@ -126,6 +126,7 @@ mod tests {
|
||||
}
|
||||
},
|
||||
"required": ["location"],
|
||||
"additionalProperties": false
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
@@ -48,6 +48,10 @@ impl Tool for CopyPathTool {
|
||||
false
|
||||
}
|
||||
|
||||
fn may_perform_edits(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./copy_path_tool/description.md").into()
|
||||
}
|
||||
|
||||
@@ -33,12 +33,16 @@ impl Tool for CreateDirectoryTool {
|
||||
"create_directory".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./create_directory_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./create_directory_tool/description.md").into()
|
||||
fn may_perform_edits(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn icon(&self) -> IconName {
|
||||
|
||||
@@ -37,6 +37,10 @@ impl Tool for DeletePathTool {
|
||||
false
|
||||
}
|
||||
|
||||
fn may_perform_edits(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./delete_path_tool/description.md").into()
|
||||
}
|
||||
|
||||
@@ -50,6 +50,10 @@ impl Tool for DiagnosticsTool {
|
||||
false
|
||||
}
|
||||
|
||||
fn may_perform_edits(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./diagnostics_tool/description.md").into()
|
||||
}
|
||||
|
||||
@@ -54,6 +54,7 @@ impl Template for EditFilePromptTemplate {
|
||||
pub enum EditAgentOutputEvent {
|
||||
ResolvingEditRange(Range<Anchor>),
|
||||
UnresolvedEditRange,
|
||||
AmbiguousEditRange(Vec<Range<usize>>),
|
||||
Edited,
|
||||
}
|
||||
|
||||
@@ -269,16 +270,29 @@ impl EditAgent {
|
||||
}
|
||||
}
|
||||
|
||||
let (edit_events_, resolved_old_text) = resolve_old_text.await?;
|
||||
let (edit_events_, mut resolved_old_text) = resolve_old_text.await?;
|
||||
edit_events = edit_events_;
|
||||
|
||||
// If we can't resolve the old text, restart the loop waiting for a
|
||||
// new edit (or for the stream to end).
|
||||
let Some(resolved_old_text) = resolved_old_text else {
|
||||
output_events
|
||||
.unbounded_send(EditAgentOutputEvent::UnresolvedEditRange)
|
||||
.ok();
|
||||
continue;
|
||||
let resolved_old_text = match resolved_old_text.len() {
|
||||
1 => resolved_old_text.pop().unwrap(),
|
||||
0 => {
|
||||
output_events
|
||||
.unbounded_send(EditAgentOutputEvent::UnresolvedEditRange)
|
||||
.ok();
|
||||
continue;
|
||||
}
|
||||
_ => {
|
||||
let ranges = resolved_old_text
|
||||
.into_iter()
|
||||
.map(|text| text.range)
|
||||
.collect();
|
||||
output_events
|
||||
.unbounded_send(EditAgentOutputEvent::AmbiguousEditRange(ranges))
|
||||
.ok();
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
// Compute edits in the background and apply them as they become
|
||||
@@ -405,13 +419,13 @@ impl EditAgent {
|
||||
mut edit_events: T,
|
||||
cx: &mut AsyncApp,
|
||||
) -> (
|
||||
Task<Result<(T, Option<ResolvedOldText>)>>,
|
||||
async_watch::Receiver<Option<Range<usize>>>,
|
||||
Task<Result<(T, Vec<ResolvedOldText>)>>,
|
||||
watch::Receiver<Option<Range<usize>>>,
|
||||
)
|
||||
where
|
||||
T: 'static + Send + Unpin + Stream<Item = Result<EditParserEvent>>,
|
||||
{
|
||||
let (old_range_tx, old_range_rx) = async_watch::channel(None);
|
||||
let (mut old_range_tx, old_range_rx) = watch::channel(None);
|
||||
let task = cx.background_spawn(async move {
|
||||
let mut matcher = StreamingFuzzyMatcher::new(snapshot);
|
||||
while let Some(edit_event) = edit_events.next().await {
|
||||
@@ -425,21 +439,29 @@ impl EditAgent {
|
||||
}
|
||||
}
|
||||
|
||||
let old_range = matcher.finish();
|
||||
old_range_tx.send(old_range.clone())?;
|
||||
if let Some(old_range) = old_range {
|
||||
let line_indent =
|
||||
LineIndent::from_iter(matcher.query_lines().first().unwrap().chars());
|
||||
Ok((
|
||||
edit_events,
|
||||
Some(ResolvedOldText {
|
||||
range: old_range,
|
||||
indent: line_indent,
|
||||
}),
|
||||
))
|
||||
let matches = matcher.finish();
|
||||
|
||||
let old_range = if matches.len() == 1 {
|
||||
matches.first()
|
||||
} else {
|
||||
Ok((edit_events, None))
|
||||
}
|
||||
// No matches or multiple ambiguous matches
|
||||
None
|
||||
};
|
||||
old_range_tx.send(old_range.cloned())?;
|
||||
|
||||
let indent = LineIndent::from_iter(
|
||||
matcher
|
||||
.query_lines()
|
||||
.first()
|
||||
.unwrap_or(&String::new())
|
||||
.chars(),
|
||||
);
|
||||
let resolved_old_texts = matches
|
||||
.into_iter()
|
||||
.map(|range| ResolvedOldText { range, indent })
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok((edit_events, resolved_old_texts))
|
||||
});
|
||||
|
||||
(task, old_range_rx)
|
||||
@@ -1322,6 +1344,76 @@ mod tests {
|
||||
EditAgent::new(model, project, action_log, Templates::new())
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_non_unique_text_error(cx: &mut TestAppContext, mut rng: StdRng) {
|
||||
let agent = init_test(cx).await;
|
||||
let original_text = indoc! {"
|
||||
function foo() {
|
||||
return 42;
|
||||
}
|
||||
|
||||
function bar() {
|
||||
return 42;
|
||||
}
|
||||
|
||||
function baz() {
|
||||
return 42;
|
||||
}
|
||||
"};
|
||||
let buffer = cx.new(|cx| Buffer::local(original_text, cx));
|
||||
let (apply, mut events) = agent.edit(
|
||||
buffer.clone(),
|
||||
String::new(),
|
||||
&LanguageModelRequest::default(),
|
||||
&mut cx.to_async(),
|
||||
);
|
||||
cx.run_until_parked();
|
||||
|
||||
// When <old_text> matches text in more than one place
|
||||
simulate_llm_output(
|
||||
&agent,
|
||||
indoc! {"
|
||||
<old_text>
|
||||
return 42;
|
||||
</old_text>
|
||||
<new_text>
|
||||
return 100;
|
||||
</new_text>
|
||||
"},
|
||||
&mut rng,
|
||||
cx,
|
||||
);
|
||||
apply.await.unwrap();
|
||||
|
||||
// Then the text should remain unchanged
|
||||
let result_text = buffer.read_with(cx, |buffer, _| buffer.snapshot().text());
|
||||
assert_eq!(
|
||||
result_text,
|
||||
indoc! {"
|
||||
function foo() {
|
||||
return 42;
|
||||
}
|
||||
|
||||
function bar() {
|
||||
return 42;
|
||||
}
|
||||
|
||||
function baz() {
|
||||
return 42;
|
||||
}
|
||||
"},
|
||||
"Text should remain unchanged when there are multiple matches"
|
||||
);
|
||||
|
||||
// And an AmbiguousEditRange event should be emitted
|
||||
let events = drain_events(&mut events);
|
||||
let ambiguous_ranges = vec![17..31, 52..66, 87..101];
|
||||
assert!(
|
||||
events.contains(&EditAgentOutputEvent::AmbiguousEditRange(ambiguous_ranges)),
|
||||
"Should emit AmbiguousEditRange for non-unique text"
|
||||
);
|
||||
}
|
||||
|
||||
fn drain_events(
|
||||
stream: &mut UnboundedReceiver<EditAgentOutputEvent>,
|
||||
) -> Vec<EditAgentOutputEvent> {
|
||||
|
||||
@@ -11,7 +11,7 @@ use client::{Client, UserStore};
|
||||
use collections::HashMap;
|
||||
use fs::FakeFs;
|
||||
use futures::{FutureExt, future::LocalBoxFuture};
|
||||
use gpui::{AppContext, TestAppContext};
|
||||
use gpui::{AppContext, TestAppContext, Timer};
|
||||
use indoc::{formatdoc, indoc};
|
||||
use language_model::{
|
||||
LanguageModelRegistry, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
@@ -39,7 +39,7 @@ fn eval_extract_handle_command_output() {
|
||||
// Model | Pass rate
|
||||
// ----------------------------|----------
|
||||
// claude-3.7-sonnet | 0.98
|
||||
// gemini-2.5-pro | 0.86
|
||||
// gemini-2.5-pro-06-05 | 0.77
|
||||
// gemini-2.5-flash | 0.11
|
||||
// gpt-4.1 | 1.00
|
||||
|
||||
@@ -58,6 +58,7 @@ fn eval_extract_handle_command_output() {
|
||||
eval(
|
||||
100,
|
||||
0.7, // Taking the lower bar for Gemini
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -116,6 +117,7 @@ fn eval_delete_run_git_blame() {
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -178,6 +180,7 @@ fn eval_translate_doc_comments() {
|
||||
eval(
|
||||
200,
|
||||
1.,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -241,6 +244,7 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -365,6 +369,7 @@ fn eval_disable_cursor_blinking() {
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(User, [text("Let's research how to cursor blinking works.")]),
|
||||
@@ -448,6 +453,9 @@ fn eval_from_pixels_constructor() {
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
// For whatever reason, this eval produces more mismatched tags.
|
||||
// Increasing for now, let's see if we can bring this down.
|
||||
0.2,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -648,6 +656,7 @@ fn eval_zode() {
|
||||
eval(
|
||||
50,
|
||||
1.,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(User, [text(include_str!("evals/fixtures/zode/prompt.md"))]),
|
||||
@@ -754,6 +763,7 @@ fn eval_add_overwrite_test() {
|
||||
eval(
|
||||
200,
|
||||
0.5, // TODO: make this eval better
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -993,6 +1003,7 @@ fn eval_create_empty_file() {
|
||||
eval(
|
||||
100,
|
||||
0.99,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(User, [text("Create a second empty todo file ")]),
|
||||
@@ -1244,9 +1255,12 @@ impl EvalAssertion {
|
||||
}],
|
||||
..Default::default()
|
||||
};
|
||||
let mut response = judge
|
||||
.stream_completion_text(request, &cx.to_async())
|
||||
.await?;
|
||||
let mut response = retry_on_rate_limit(async || {
|
||||
Ok(judge
|
||||
.stream_completion_text(request.clone(), &cx.to_async())
|
||||
.await?)
|
||||
})
|
||||
.await?;
|
||||
let mut output = String::new();
|
||||
while let Some(chunk) = response.stream.next().await {
|
||||
let chunk = chunk?;
|
||||
@@ -1279,7 +1293,12 @@ impl EvalAssertion {
|
||||
}
|
||||
}
|
||||
|
||||
fn eval(iterations: usize, expected_pass_ratio: f32, mut eval: EvalInput) {
|
||||
fn eval(
|
||||
iterations: usize,
|
||||
expected_pass_ratio: f32,
|
||||
mismatched_tag_threshold: f32,
|
||||
mut eval: EvalInput,
|
||||
) {
|
||||
let mut evaluated_count = 0;
|
||||
let mut failed_count = 0;
|
||||
report_progress(evaluated_count, failed_count, iterations);
|
||||
@@ -1292,10 +1311,17 @@ fn eval(iterations: usize, expected_pass_ratio: f32, mut eval: EvalInput) {
|
||||
run_eval(eval.clone(), tx.clone());
|
||||
|
||||
let executor = gpui::background_executor();
|
||||
let semaphore = Arc::new(smol::lock::Semaphore::new(32));
|
||||
for _ in 1..iterations {
|
||||
let eval = eval.clone();
|
||||
let tx = tx.clone();
|
||||
executor.spawn(async move { run_eval(eval, tx) }).detach();
|
||||
let semaphore = semaphore.clone();
|
||||
executor
|
||||
.spawn(async move {
|
||||
let _guard = semaphore.acquire().await;
|
||||
run_eval(eval, tx)
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
drop(tx);
|
||||
|
||||
@@ -1351,7 +1377,7 @@ fn eval(iterations: usize, expected_pass_ratio: f32, mut eval: EvalInput) {
|
||||
|
||||
let mismatched_tag_ratio =
|
||||
cumulative_parser_metrics.mismatched_tags as f32 / cumulative_parser_metrics.tags as f32;
|
||||
if mismatched_tag_ratio > 0.05 {
|
||||
if mismatched_tag_ratio > mismatched_tag_threshold {
|
||||
for eval_output in eval_outputs {
|
||||
println!("{}", eval_output);
|
||||
}
|
||||
@@ -1561,21 +1587,31 @@ impl EditAgentTest {
|
||||
if let Some(input_content) = eval.input_content.as_deref() {
|
||||
buffer.update(cx, |buffer, cx| buffer.set_text(input_content, cx));
|
||||
}
|
||||
let (edit_output, _) = self.agent.edit(
|
||||
buffer.clone(),
|
||||
eval.edit_file_input.display_description,
|
||||
&conversation,
|
||||
&mut cx.to_async(),
|
||||
);
|
||||
edit_output.await?
|
||||
retry_on_rate_limit(async || {
|
||||
self.agent
|
||||
.edit(
|
||||
buffer.clone(),
|
||||
eval.edit_file_input.display_description.clone(),
|
||||
&conversation,
|
||||
&mut cx.to_async(),
|
||||
)
|
||||
.0
|
||||
.await
|
||||
})
|
||||
.await?
|
||||
} else {
|
||||
let (edit_output, _) = self.agent.overwrite(
|
||||
buffer.clone(),
|
||||
eval.edit_file_input.display_description,
|
||||
&conversation,
|
||||
&mut cx.to_async(),
|
||||
);
|
||||
edit_output.await?
|
||||
retry_on_rate_limit(async || {
|
||||
self.agent
|
||||
.overwrite(
|
||||
buffer.clone(),
|
||||
eval.edit_file_input.display_description.clone(),
|
||||
&conversation,
|
||||
&mut cx.to_async(),
|
||||
)
|
||||
.0
|
||||
.await
|
||||
})
|
||||
.await?
|
||||
};
|
||||
|
||||
let buffer_text = buffer.read_with(cx, |buffer, _| buffer.text());
|
||||
@@ -1597,6 +1633,26 @@ impl EditAgentTest {
|
||||
}
|
||||
}
|
||||
|
||||
async fn retry_on_rate_limit<R>(mut request: impl AsyncFnMut() -> Result<R>) -> Result<R> {
|
||||
loop {
|
||||
match request().await {
|
||||
Ok(result) => return Ok(result),
|
||||
Err(err) => match err.downcast::<LanguageModelCompletionError>() {
|
||||
Ok(err) => match err {
|
||||
LanguageModelCompletionError::RateLimit(duration) => {
|
||||
// Wait until after we are allowed to try again
|
||||
eprintln!("Rate limit exceeded. Waiting for {duration:?}...",);
|
||||
Timer::after(duration).await;
|
||||
continue;
|
||||
}
|
||||
_ => return Err(err.into()),
|
||||
},
|
||||
Err(err) => return Err(err),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
|
||||
struct EvalAssertionOutcome {
|
||||
score: usize,
|
||||
|
||||
@@ -498,7 +498,7 @@ client.with_options(max_retries=5).messages.create(
|
||||
### Timeouts
|
||||
|
||||
By default requests time out after 10 minutes. You can configure this with a `timeout` option,
|
||||
which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/#fine-tuning-the-configuration) object:
|
||||
which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object:
|
||||
|
||||
```python
|
||||
from anthropic import Anthropic
|
||||
|
||||
@@ -11,7 +11,7 @@ pub struct StreamingFuzzyMatcher {
|
||||
snapshot: TextBufferSnapshot,
|
||||
query_lines: Vec<String>,
|
||||
incomplete_line: String,
|
||||
best_match: Option<Range<usize>>,
|
||||
best_matches: Vec<Range<usize>>,
|
||||
matrix: SearchMatrix,
|
||||
}
|
||||
|
||||
@@ -22,7 +22,7 @@ impl StreamingFuzzyMatcher {
|
||||
snapshot,
|
||||
query_lines: Vec::new(),
|
||||
incomplete_line: String::new(),
|
||||
best_match: None,
|
||||
best_matches: Vec::new(),
|
||||
matrix: SearchMatrix::new(buffer_line_count + 1),
|
||||
}
|
||||
}
|
||||
@@ -55,31 +55,41 @@ impl StreamingFuzzyMatcher {
|
||||
|
||||
self.incomplete_line.replace_range(..last_pos + 1, "");
|
||||
|
||||
self.best_match = self.resolve_location_fuzzy();
|
||||
}
|
||||
self.best_matches = self.resolve_location_fuzzy();
|
||||
|
||||
self.best_match.clone()
|
||||
if let Some(first_match) = self.best_matches.first() {
|
||||
Some(first_match.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
if let Some(first_match) = self.best_matches.first() {
|
||||
Some(first_match.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Finish processing and return the final best match.
|
||||
/// Finish processing and return the final best match(es).
|
||||
///
|
||||
/// This processes any remaining incomplete line before returning the final
|
||||
/// match result.
|
||||
pub fn finish(&mut self) -> Option<Range<usize>> {
|
||||
pub fn finish(&mut self) -> Vec<Range<usize>> {
|
||||
// Process any remaining incomplete line
|
||||
if !self.incomplete_line.is_empty() {
|
||||
self.query_lines.push(self.incomplete_line.clone());
|
||||
self.best_match = self.resolve_location_fuzzy();
|
||||
self.incomplete_line.clear();
|
||||
self.best_matches = self.resolve_location_fuzzy();
|
||||
}
|
||||
|
||||
self.best_match.clone()
|
||||
self.best_matches.clone()
|
||||
}
|
||||
|
||||
fn resolve_location_fuzzy(&mut self) -> Option<Range<usize>> {
|
||||
fn resolve_location_fuzzy(&mut self) -> Vec<Range<usize>> {
|
||||
let new_query_line_count = self.query_lines.len();
|
||||
let old_query_line_count = self.matrix.rows.saturating_sub(1);
|
||||
if new_query_line_count == old_query_line_count {
|
||||
return None;
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
self.matrix.resize_rows(new_query_line_count + 1);
|
||||
@@ -132,53 +142,61 @@ impl StreamingFuzzyMatcher {
}
}

// Traceback to find the best match
// Find all matches with the best cost
let buffer_line_count = self.snapshot.max_point().row as usize + 1;
let mut buffer_row_end = buffer_line_count as u32;
let mut best_cost = u32::MAX;
let mut matches_with_best_cost = Vec::new();

for col in 1..=buffer_line_count {
let cost = self.matrix.get(new_query_line_count, col).cost;
if cost < best_cost {
best_cost = cost;
buffer_row_end = col as u32;
matches_with_best_cost.clear();
matches_with_best_cost.push(col as u32);
} else if cost == best_cost {
matches_with_best_cost.push(col as u32);
}
}

let mut matched_lines = 0;
let mut query_row = new_query_line_count;
let mut buffer_row_start = buffer_row_end;
while query_row > 0 && buffer_row_start > 0 {
let current = self.matrix.get(query_row, buffer_row_start as usize);
match current.direction {
SearchDirection::Diagonal => {
query_row -= 1;
buffer_row_start -= 1;
matched_lines += 1;
}
SearchDirection::Up => {
query_row -= 1;
}
SearchDirection::Left => {
buffer_row_start -= 1;
// Find ranges for the matches
let mut valid_matches = Vec::new();
for &buffer_row_end in &matches_with_best_cost {
let mut matched_lines = 0;
let mut query_row = new_query_line_count;
let mut buffer_row_start = buffer_row_end;
while query_row > 0 && buffer_row_start > 0 {
let current = self.matrix.get(query_row, buffer_row_start as usize);
match current.direction {
SearchDirection::Diagonal => {
query_row -= 1;
buffer_row_start -= 1;
matched_lines += 1;
}
SearchDirection::Up => {
query_row -= 1;
}
SearchDirection::Left => {
buffer_row_start -= 1;
}
}
}

let matched_buffer_row_count = buffer_row_end - buffer_row_start;
let matched_ratio = matched_lines as f32
/ (matched_buffer_row_count as f32).max(new_query_line_count as f32);
if matched_ratio >= 0.8 {
let buffer_start_ix = self
.snapshot
.point_to_offset(Point::new(buffer_row_start, 0));
let buffer_end_ix = self.snapshot.point_to_offset(Point::new(
buffer_row_end - 1,
self.snapshot.line_len(buffer_row_end - 1),
));
valid_matches.push((buffer_row_start, buffer_start_ix..buffer_end_ix));
}
}

let matched_buffer_row_count = buffer_row_end - buffer_row_start;
let matched_ratio = matched_lines as f32
/ (matched_buffer_row_count as f32).max(new_query_line_count as f32);
if matched_ratio >= 0.8 {
let buffer_start_ix = self
.snapshot
.point_to_offset(Point::new(buffer_row_start, 0));
let buffer_end_ix = self.snapshot.point_to_offset(Point::new(
buffer_row_end - 1,
self.snapshot.line_len(buffer_row_end - 1),
));
Some(buffer_start_ix..buffer_end_ix)
} else {
None
}
valid_matches.into_iter().map(|(_, range)| range).collect()
}
}

@@ -638,28 +656,35 @@ mod tests {
matcher.push(chunk);
}

let result = matcher.finish();
let actual_ranges = matcher.finish();

// If no expected ranges, we expect no match
if expected_ranges.is_empty() {
assert_eq!(
result, None,
assert!(
actual_ranges.is_empty(),
"Expected no match for query: {:?}, but found: {:?}",
query, result
query,
actual_ranges
);
} else {
let mut actual_ranges = Vec::new();
if let Some(range) = result {
actual_ranges.push(range);
}

let text_with_actual_range = generate_marked_text(&text, &actual_ranges, false);
pretty_assertions::assert_eq!(
text_with_actual_range,
text_with_expected_range,
"Query: {:?}, Chunks: {:?}",
indoc! {"
Query: {:?}
Chunks: {:?}
Expected marked text: {}
Actual marked text: {}
Expected ranges: {:?}
Actual ranges: {:?}"
},
query,
chunks
chunks,
text_with_expected_range,
text_with_actual_range,
expected_ranges,
actual_ranges
);
}
}
@@ -687,8 +712,11 @@ mod tests {

fn finish(mut finder: StreamingFuzzyMatcher) -> Option<String> {
let snapshot = finder.snapshot.clone();
finder
.finish()
.map(|range| snapshot.text_for_range(range).collect::<String>())
let matches = finder.finish();
if let Some(range) = matches.first() {
Some(snapshot.text_for_range(range.clone()).collect::<String>())
} else {
None
}
}
}

@@ -2,6 +2,7 @@ use crate::{
Templates,
edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent},
schema::json_schema_for,
ui::{COLLAPSED_LINES, ToolOutputPreview},
};
use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{
@@ -13,7 +14,7 @@ use editor::{Editor, EditorMode, MinimapVisibility, MultiBuffer, PathKey};
use futures::StreamExt;
use gpui::{
Animation, AnimationExt, AnyWindowHandle, App, AppContext, AsyncApp, Entity, Task,
TextStyleRefinement, WeakEntity, pulsating_between,
TextStyleRefinement, WeakEntity, pulsating_between, px,
};
use indoc::formatdoc;
use language::{
@@ -128,6 +129,10 @@ impl Tool for EditFileTool {
false
}

fn may_perform_edits(&self) -> bool {
true
}

fn description(&self) -> String {
include_str!("edit_file_tool/description.md").to_string()
}
@@ -234,6 +239,7 @@ impl Tool for EditFileTool {
};

let mut hallucinated_old_text = false;
let mut ambiguous_ranges = Vec::new();
while let Some(event) = events.next().await {
match event {
EditAgentOutputEvent::Edited => {
@@ -242,6 +248,7 @@ impl Tool for EditFileTool {
}
}
EditAgentOutputEvent::UnresolvedEditRange => hallucinated_old_text = true,
EditAgentOutputEvent::AmbiguousEditRange(ranges) => ambiguous_ranges = ranges,
EditAgentOutputEvent::ResolvingEditRange(range) => {
if let Some(card) = card_clone.as_ref() {
card.update(cx, |card, cx| card.reveal_range(range, cx))?;
@@ -324,6 +331,17 @@ impl Tool for EditFileTool {
I can perform the requested edits.
"}
);
anyhow::ensure!(
ambiguous_ranges.is_empty(),
// TODO: Include ambiguous_ranges, converted to line numbers.
// This would work best if we add `line_hint` parameter
// to edit_file_tool
formatdoc! {"
<old_text> matches more than one position in the file. Read the
relevant sections of {input_path} again and extend <old_text> so
that I can perform the requested edits.
"}
);
Ok(ToolResultOutput {
content: ToolResultContent::Text("No edits were made.".into()),
output: serde_json::to_value(output).ok(),
@@ -884,30 +902,8 @@ impl ToolCard for EditFileToolCard {
(element.into_any_element(), line_height)
});

let (full_height_icon, full_height_tooltip_label) = if self.full_height_expanded {
(IconName::ChevronUp, "Collapse Code Block")
} else {
(IconName::ChevronDown, "Expand Code Block")
};

let gradient_overlay =
div()
.absolute()
.bottom_0()
.left_0()
.w_full()
.h_2_5()
.bg(gpui::linear_gradient(
0.,
gpui::linear_color_stop(cx.theme().colors().editor_background, 0.),
gpui::linear_color_stop(cx.theme().colors().editor_background.opacity(0.), 1.),
));

let border_color = cx.theme().colors().border.opacity(0.6);

const DEFAULT_COLLAPSED_LINES: u32 = 10;
let is_collapsible = self.total_lines.unwrap_or(0) > DEFAULT_COLLAPSED_LINES;

let waiting_for_diff = {
let styles = [
("w_4_5", (0.1, 0.85), 2000),
@@ -992,48 +988,34 @@ impl ToolCard for EditFileToolCard {
card.child(waiting_for_diff)
})
.when(self.preview_expanded && !self.is_loading(), |card| {
let editor_view = v_flex()
.relative()
.h_full()
.when(!self.full_height_expanded, |editor_container| {
editor_container.max_h(px(COLLAPSED_LINES as f32 * editor_line_height.0))
})
.overflow_hidden()
.border_t_1()
.border_color(border_color)
.bg(cx.theme().colors().editor_background)
.child(editor);

card.child(
v_flex()
.relative()
.h_full()
.when(!self.full_height_expanded, |editor_container| {
editor_container
.max_h(DEFAULT_COLLAPSED_LINES as f32 * editor_line_height)
})
.overflow_hidden()
.border_t_1()
.border_color(border_color)
.bg(cx.theme().colors().editor_background)
.child(editor)
.when(
!self.full_height_expanded && is_collapsible,
|editor_container| editor_container.child(gradient_overlay),
),
ToolOutputPreview::new(editor_view.into_any_element(), self.editor.entity_id())
.with_total_lines(self.total_lines.unwrap_or(0) as usize)
.toggle_state(self.full_height_expanded)
.with_collapsed_fade()
.on_toggle({
let this = cx.entity().downgrade();
move |is_expanded, _window, cx| {
if let Some(this) = this.upgrade() {
this.update(cx, |this, _cx| {
this.full_height_expanded = is_expanded;
});
}
}
}),
)
.when(is_collapsible, |card| {
card.child(
h_flex()
.id(("expand-button", self.editor.entity_id()))
.flex_none()
.cursor_pointer()
.h_5()
.justify_center()
.border_t_1()
.rounded_b_md()
.border_color(border_color)
.bg(cx.theme().colors().editor_background)
.hover(|style| style.bg(cx.theme().colors().element_hover.opacity(0.1)))
.child(
Icon::new(full_height_icon)
.size(IconSize::Small)
.color(Color::Muted),
)
.tooltip(Tooltip::text(full_height_tooltip_label))
.on_click(cx.listener(move |this, _event, _window, _cx| {
this.full_height_expanded = !this.full_height_expanded;
})),
)
})
})
}
}

@@ -118,7 +118,11 @@ impl Tool for FetchTool {
}

fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
true
false
}

fn may_perform_edits(&self) -> bool {
false
}

fn description(&self) -> String {

@@ -59,6 +59,10 @@ impl Tool for FindPathTool {
false
}

fn may_perform_edits(&self) -> bool {
false
}

fn description(&self) -> String {
include_str!("./find_path_tool/description.md").into()
}

@@ -6,11 +6,12 @@ use gpui::{AnyWindowHandle, App, Entity, Task};
use language::{OffsetRangeExt, ParseStatus, Point};
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
use project::{
Project,
Project, WorktreeSettings,
search::{SearchQuery, SearchResult},
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::{cmp, fmt::Write, sync::Arc};
use ui::IconName;
use util::RangeExt;
@@ -60,6 +61,10 @@ impl Tool for GrepTool {
false
}

fn may_perform_edits(&self) -> bool {
false
}

fn description(&self) -> String {
include_str!("./grep_tool/description.md").into()
}
@@ -126,6 +131,23 @@ impl Tool for GrepTool {
}
};

// Exclude global file_scan_exclusions and private_files settings
let exclude_matcher = {
let global_settings = WorktreeSettings::get_global(cx);
let exclude_patterns = global_settings
.file_scan_exclusions
.sources()
.iter()
.chain(global_settings.private_files.sources().iter());

match PathMatcher::new(exclude_patterns) {
Ok(matcher) => matcher,
Err(error) => {
return Task::ready(Err(anyhow!("invalid exclude pattern: {error}"))).into();
}
}
};

let query = match SearchQuery::regex(
&input.regex,
false,
@@ -133,7 +155,7 @@ impl Tool for GrepTool {
false,
false,
include_matcher,
PathMatcher::default(), // For now, keep it simple and don't enable an exclude pattern.
exclude_matcher,
true, // Always match file include pattern against *full project paths* that start with a project root.
None,
) {
@@ -156,12 +178,24 @@ impl Tool for GrepTool {
continue;
}

let (Some(path), mut parse_status) = buffer.read_with(cx, |buffer, cx| {
let Ok((Some(path), mut parse_status)) = buffer.read_with(cx, |buffer, cx| {
(buffer.file().map(|file| file.full_path(cx)), buffer.parse_status())
})? else {
}) else {
continue;
};

// Check if this file should be excluded based on its worktree settings
if let Ok(Some(project_path)) = project.read_with(cx, |project, cx| {
project.find_project_path(&path, cx)
}) {
if cx.update(|cx| {
let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx);
worktree_settings.is_path_excluded(&project_path.path)
|| worktree_settings.is_path_private(&project_path.path)
}).unwrap_or(false) {
continue;
}
}

while *parse_status.borrow() != ParseStatus::Idle {
parse_status.changed().await?;
@@ -280,10 +314,11 @@ impl Tool for GrepTool {
mod tests {
use super::*;
use assistant_tool::Tool;
use gpui::{AppContext, TestAppContext};
use gpui::{AppContext, TestAppContext, UpdateGlobal};
use language::{Language, LanguageConfig, LanguageMatcher};
use language_model::fake_provider::FakeLanguageModel;
use project::{FakeFs, Project};
use project::{FakeFs, Project, WorktreeSettings};
use serde_json::json;
use settings::SettingsStore;
use unindent::Unindent;
use util::path;
@@ -295,7 +330,7 @@ mod tests {

let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
path!("/root"),
serde_json::json!({
"src": {
"main.rs": "fn main() {\n println!(\"Hello, world!\");\n}",
@@ -383,7 +418,7 @@ mod tests {

let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
path!("/root"),
serde_json::json!({
"case_test.txt": "This file has UPPERCASE and lowercase text.\nUPPERCASE patterns should match only with case_sensitive: true",
}),
@@ -464,7 +499,7 @@ mod tests {

// Create test file with syntax structures
fs.insert_tree(
"/root",
path!("/root"),
serde_json::json!({
"test_syntax.rs": r#"
fn top_level_function() {
@@ -785,4 +820,488 @@ mod tests {
.with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
.unwrap()
}

#[gpui::test]
async fn test_grep_security_boundaries(cx: &mut TestAppContext) {
init_test(cx);

let fs = FakeFs::new(cx.executor());

fs.insert_tree(
path!("/"),
json!({
"project_root": {
"allowed_file.rs": "fn main() { println!(\"This file is in the project\"); }",
".mysecrets": "SECRET_KEY=abc123\nfn secret() { /* private */ }",
".secretdir": {
"config": "fn special_configuration() { /* excluded */ }"
},
".mymetadata": "fn custom_metadata() { /* excluded */ }",
"subdir": {
"normal_file.rs": "fn normal_file_content() { /* Normal */ }",
"special.privatekey": "fn private_key_content() { /* private */ }",
"data.mysensitive": "fn sensitive_data() { /* private */ }"
}
},
"outside_project": {
"sensitive_file.rs": "fn outside_function() { /* This file is outside the project */ }"
}
}),
)
.await;

cx.update(|cx| {
use gpui::UpdateGlobal;
use project::WorktreeSettings;
use settings::SettingsStore;
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<WorktreeSettings>(cx, |settings| {
settings.file_scan_exclusions = Some(vec![
"**/.secretdir".to_string(),
"**/.mymetadata".to_string(),
]);
settings.private_files = Some(vec![
"**/.mysecrets".to_string(),
"**/*.privatekey".to_string(),
"**/*.mysensitive".to_string(),
]);
});
});
});

let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let model = Arc::new(FakeLanguageModel::default());

// Searching for files outside the project worktree should return no results
let result = cx
.update(|cx| {
let input = json!({
"regex": "outside_function"
});
Arc::new(GrepTool)
.run(
input,
Arc::default(),
project.clone(),
action_log.clone(),
model.clone(),
None,
cx,
)
.output
})
.await;
let results = result.unwrap();
let paths = extract_paths_from_results(&results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not find files outside the project worktree"
);

// Searching within the project should succeed
let result = cx
.update(|cx| {
let input = json!({
"regex": "main"
});
Arc::new(GrepTool)
.run(
input,
Arc::default(),
project.clone(),
action_log.clone(),
model.clone(),
None,
cx,
)
.output
})
.await;
let results = result.unwrap();
let paths = extract_paths_from_results(&results.content.as_str().unwrap());
assert!(
paths.iter().any(|p| p.contains("allowed_file.rs")),
"grep_tool should be able to search files inside worktrees"
);

// Searching files that match file_scan_exclusions should return no results
let result = cx
.update(|cx| {
let input = json!({
"regex": "special_configuration"
});
Arc::new(GrepTool)
.run(
input,
Arc::default(),
project.clone(),
action_log.clone(),
model.clone(),
None,
cx,
)
.output
})
.await;
let results = result.unwrap();
let paths = extract_paths_from_results(&results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not search files in .secretdir (file_scan_exclusions)"
);

let result = cx
.update(|cx| {
let input = json!({
"regex": "custom_metadata"
});
Arc::new(GrepTool)
.run(
input,
Arc::default(),
project.clone(),
action_log.clone(),
model.clone(),
None,
cx,
)
.output
})
.await;
let results = result.unwrap();
let paths = extract_paths_from_results(&results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not search .mymetadata files (file_scan_exclusions)"
);

// Searching private files should return no results
let result = cx
.update(|cx| {
let input = json!({
"regex": "SECRET_KEY"
});
Arc::new(GrepTool)
.run(
input,
Arc::default(),
project.clone(),
action_log.clone(),
model.clone(),
None,
cx,
)
.output
})
.await;
let results = result.unwrap();
let paths = extract_paths_from_results(&results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not search .mysecrets (private_files)"
);

let result = cx
.update(|cx| {
let input = json!({
"regex": "private_key_content"
});
Arc::new(GrepTool)
.run(
input,
Arc::default(),
project.clone(),
action_log.clone(),
model.clone(),
None,
cx,
)
.output
})
.await;
let results = result.unwrap();
let paths = extract_paths_from_results(&results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not search .privatekey files (private_files)"
);

let result = cx
.update(|cx| {
let input = json!({
"regex": "sensitive_data"
});
Arc::new(GrepTool)
.run(
input,
Arc::default(),
project.clone(),
action_log.clone(),
model.clone(),
None,
cx,
)
.output
})
.await;
let results = result.unwrap();
let paths = extract_paths_from_results(&results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not search .mysensitive files (private_files)"
);

// Searching a normal file should still work, even with private_files configured
let result = cx
.update(|cx| {
let input = json!({
"regex": "normal_file_content"
});
Arc::new(GrepTool)
.run(
input,
Arc::default(),
project.clone(),
action_log.clone(),
model.clone(),
None,
cx,
)
.output
})
.await;
let results = result.unwrap();
let paths = extract_paths_from_results(&results.content.as_str().unwrap());
assert!(
paths.iter().any(|p| p.contains("normal_file.rs")),
"Should be able to search normal files"
);

// Path traversal attempts with .. in include_pattern should not escape project
let result = cx
.update(|cx| {
let input = json!({
"regex": "outside_function",
"include_pattern": "../outside_project/**/*.rs"
});
Arc::new(GrepTool)
.run(
input,
Arc::default(),
project.clone(),
action_log.clone(),
model.clone(),
None,
cx,
)
.output
})
.await;
let results = result.unwrap();
let paths = extract_paths_from_results(&results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not allow escaping project boundaries with relative paths"
);
}

#[gpui::test]
async fn test_grep_with_multiple_worktree_settings(cx: &mut TestAppContext) {
init_test(cx);

let fs = FakeFs::new(cx.executor());

// Create first worktree with its own private files
fs.insert_tree(
path!("/worktree1"),
json!({
".zed": {
"settings.json": r#"{
"file_scan_exclusions": ["**/fixture.*"],
"private_files": ["**/secret.rs"]
}"#
},
"src": {
"main.rs": "fn main() { let secret_key = \"hidden\"; }",
"secret.rs": "const API_KEY: &str = \"secret_value\";",
"utils.rs": "pub fn get_config() -> String { \"config\".to_string() }"
},
"tests": {
"test.rs": "fn test_secret() { assert!(true); }",
"fixture.sql": "SELECT * FROM secret_table;"
}
}),
)
.await;

// Create second worktree with different private files
fs.insert_tree(
path!("/worktree2"),
json!({
".zed": {
"settings.json": r#"{
"file_scan_exclusions": ["**/internal.*"],
"private_files": ["**/private.js", "**/data.json"]
}"#
},
"lib": {
"public.js": "export function getSecret() { return 'public'; }",
"private.js": "const SECRET_KEY = \"private_value\";",
"data.json": "{\"secret_data\": \"hidden\"}"
},
"docs": {
"README.md": "# Documentation with secret info",
"internal.md": "Internal secret documentation"
}
}),
)
.await;

// Set global settings
cx.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<WorktreeSettings>(cx, |settings| {
settings.file_scan_exclusions =
Some(vec!["**/.git".to_string(), "**/node_modules".to_string()]);
settings.private_files = Some(vec!["**/.env".to_string()]);
});
});
});

let project = Project::test(
fs.clone(),
[path!("/worktree1").as_ref(), path!("/worktree2").as_ref()],
cx,
)
.await;

// Wait for worktrees to be fully scanned
cx.executor().run_until_parked();

let action_log = cx.new(|_| ActionLog::new(project.clone()));
let model = Arc::new(FakeLanguageModel::default());

// Search for "secret" - should exclude files based on worktree-specific settings
let result = cx
.update(|cx| {
let input = json!({
"regex": "secret",
"case_sensitive": false
});
Arc::new(GrepTool)
.run(
input,
Arc::default(),
project.clone(),
action_log.clone(),
model.clone(),
None,
cx,
)
.output
})
.await
.unwrap();

let content = result.content.as_str().unwrap();
let paths = extract_paths_from_results(&content);

// Should find matches in non-private files
assert!(
paths.iter().any(|p| p.contains("main.rs")),
"Should find 'secret' in worktree1/src/main.rs"
);
assert!(
paths.iter().any(|p| p.contains("test.rs")),
"Should find 'secret' in worktree1/tests/test.rs"
);
assert!(
paths.iter().any(|p| p.contains("public.js")),
"Should find 'secret' in worktree2/lib/public.js"
);
assert!(
paths.iter().any(|p| p.contains("README.md")),
"Should find 'secret' in worktree2/docs/README.md"
);

// Should NOT find matches in private/excluded files based on worktree settings
assert!(
!paths.iter().any(|p| p.contains("secret.rs")),
"Should not search in worktree1/src/secret.rs (local private_files)"
);
assert!(
!paths.iter().any(|p| p.contains("fixture.sql")),
"Should not search in worktree1/tests/fixture.sql (local file_scan_exclusions)"
);
assert!(
!paths.iter().any(|p| p.contains("private.js")),
"Should not search in worktree2/lib/private.js (local private_files)"
);
assert!(
!paths.iter().any(|p| p.contains("data.json")),
"Should not search in worktree2/lib/data.json (local private_files)"
);
assert!(
!paths.iter().any(|p| p.contains("internal.md")),
"Should not search in worktree2/docs/internal.md (local file_scan_exclusions)"
);

// Test with `include_pattern` specific to one worktree
let result = cx
.update(|cx| {
let input = json!({
"regex": "secret",
"include_pattern": "worktree1/**/*.rs"
});
Arc::new(GrepTool)
.run(
input,
Arc::default(),
project.clone(),
action_log.clone(),
model.clone(),
None,
cx,
)
.output
})
.await
.unwrap();

let content = result.content.as_str().unwrap();
let paths = extract_paths_from_results(&content);

// Should only find matches in worktree1 *.rs files (excluding private ones)
assert!(
paths.iter().any(|p| p.contains("main.rs")),
"Should find match in worktree1/src/main.rs"
);
assert!(
paths.iter().any(|p| p.contains("test.rs")),
"Should find match in worktree1/tests/test.rs"
);
assert!(
!paths.iter().any(|p| p.contains("secret.rs")),
"Should not find match in excluded worktree1/src/secret.rs"
);
assert!(
paths.iter().all(|p| !p.contains("worktree2")),
"Should not find any matches in worktree2"
);
}

// Helper function to extract file paths from grep results
fn extract_paths_from_results(results: &str) -> Vec<String> {
results
.lines()
.filter(|line| line.starts_with("## Matches in "))
.map(|line| {
line.strip_prefix("## Matches in ")
.unwrap()
.trim()
.to_string()
})
.collect()
}
}

@@ -6,3 +6,4 @@ Searches the contents of files in the project with a regular expression
- Never use this tool to search for paths. Only search file contents with this tool.
- Use this tool when you need to find files containing specific patterns
- Results are paginated with 20 matches per page. Use the optional 'offset' parameter to request subsequent pages.
- DO NOT use HTML entities solely to escape characters in the tool parameters.