Compare commits
debug-flak...tool-calli
330 Commits

bfa12d92ef
f6b8fad275
14b26034d8
70e895a8c7
4bd935b409
c0df1e1846
e9d0768e3c
380a99038b
88653c4e3e
3751f67730
6af385c09e
e6cd1cf22b
a1bd7a1297
3e31955b7f
be86852f95
bde02a350e
4c9311ba40
c8bc49fa18
bcd972fbb4
e423f03ba6
03ebbcbef6
27f97ba762
769ae8b101
d27fef7b2c
f4bbbe69b4
c937a2fcdd
a5279cc48a
4d56252bae
0360cda543
5e04753d1c
71312e5692
05825e9804
73d682c010
e59e47fe7f
4abf7f058e
f980e40993
57b2cb6f60
af014a2530
243fb3562c
e830865eb1
7aa6f4788d
18daf17d0e
856d9632e4
745d2e4d3b
50dbab0747
70c22cbdd6
9621005851
05003ed4c5
2c610c0e57
479ffbbd51
fe23504eba
95d82f88de
4000b0a02c
02c43a5bf2
f2060ccbe0
13693ff80f
ec5886a078
10c9e337cf
1da6a12bb4
cc1d3f0a35
22118f15e9
0d5de88c4b
f291677d40
9d736fe80c
f3ad754396
86456ce379
d755d29577
ab3c9f0678
201db23b58
beb8fbdf7f
d2501e8886
82d6ad4616
a60b3b9389
06863144c6
b7c6f3e98e
7146087b44
6d3eaa055f
f31c55a76f
97750529fe
cd9dd5ccf7
3ce864e69e
9eeb564c5c
847bd35bd9
b8e5ddf456
6998c03c59
8631180e43
cd9a42e8da
3246a932ca
8ba392bba6
856a8ef5e8
6dd9ce1376
fbbea7ab01
aded3dfb05
3053f98652
ebd407deb6
5e44f5fa44
a07122d78f
6a079cbdc3
c7e2d5bd89
6bff8ecb73
e2113e4895
b56e4ff2af
88f68101d4
d852a32ef1
b53c3b84e2
3d6b07d78c
4c0d0ad4f4
55575ca830
ea44af4a85
5865c5e80f
51f8013616
b9570218b6
fb4d77c008
b14bb6bda4
8501ae6a19
4e88a08ed4
659f34bf21
001376fd6d
298ca5ff1b
596ee58be8
fd4a4127eb
0297a42735
9c9a0bd24f
740c444089
325e6b9fef
65c63defcc
274e56b086
7fb906d774
d107d22c2d
23dac9cfce
777ddefa73
77a2f2490e
846fc67519
0843da0f81
9e528dbb16
c6d6c44810
9de6d318e9
d540c95c81
f7d6818c97
06d3dc010c
64b15d3a0e
23c5fc1b03
c43eafbf9a
62a890fb7f
5ad3c6b6dc
e004a573fd
babf8923ee
faf5ab7873
912b396e58
87d93033d1
af4b9805c9
ba6c36f370
7644605f0f
b4a8f14a76
c84da37030
edf7f6defe
1307a80e07
9d11a6ff78
6769e55ce0
855048041d
d36ebc8c74
5062bf0b4d
4d65f7eea3
7ae305ac0d
ba4ff1df59
d034d73af9
5f7881fc1e
b0c525af5f
41a3e78b1e
5021397c01
b2b9d4ccb6
4a43084cb7
fa76d8edcf
5f8e799d60
38e3182bef
ec093c390f
3d1bf09299
53b711c2b4
d0f52e90e6
17ef9a367f
53f828df7d
d9a00b6f8b
7d0386eff9
bdf1d4edea
c262c81e52
a5cb66f0e1
a0d687c24a
5f57efb266
6398b45084
728650f94a
dde9d37cf9
5d77a7dc6c
1fae99a7c4
eb210ca248
ddea18d546
b85dba106b
4ba430b16c
fe1f55cbfd
01392c1329
a9397834eb
4227a3d3e0
2a69420c42
d8a42bbf63
8f20ea1093
a20e92a8c1
c703e20a06
a955968de3
f597c29432
a5a7a835fe
28baa56e3d
2e23527e09
865904a0c9
10d2353e07
1ea363b020
0155435142
a334c69e05
31d283932c
0ef19dedd2
83f6a7f228
2d96bba61f
54039162ac
45b45155d4
a4baba7edd
cb2c334358
e8bcc412b5
7b88fc5cda
2f3df9fd93
7d063aa10b
9c26d07f7a
0a9d50bf01
c7331b41e9
70f7f2d2da
cd9b25d827
781633fb1a
6dfb0a4a70
a1670551bf
71cbfc65b1
1218a846c1
48c253feb8
46b7fa9bcb
022e662815
d89e2592a1
a072caab0b
fbe30c6f2e
a5b8094082
d2efa12e16
5e635b8914
0a02691778
1dc4d4200f
b22718e643
bc16c2f85d
1805986d82
ee0dfe9c44
f4074d784c
e58db43ed8
ec487d8f64
b9a53ffa0b
7733bf686b
edddc68da6
5de5d5bc56
5467e18a5b
4c7f1032a4
d61eaea4b9
f5d50f2b1e
be4b19babb
272be98ec8
e3d5eff858
9cb17ac630
fb541accb2
836f623800
18c2e8f6ca
bf4645b1fe
4d177918c1
87457f9ae8
be45f32753
18b5a87298
48211e8ce2
5008a388e6
3c417864e6
ad3055076d
7c63f26aa9
f15a441c9d
cb217381ce
862f5a0561
7d30175527
d044685706
0496d0db9a
c19e71f51e
f7ff9b0811
be1ff8aee9
3a83fecea9
bac4a0428d
ed3d3dc690
24d9374744
5d751f232c
3fb3148995
76ce1a8f50
80558a3986
cdfadcc582
013c9f0420
22a2cc6950
49effeb7ba
edda634ca5
75948e536f
eb7fe57453
6e08e49d30
2c8ead4423
ca2976559e
457da7b27c
a2424638f5
ba4fa17b83
f5f4578422
75775292b3
344e315174
8ef53aafa2
00c3c02f7d
2edf224599
16a4c59be0
90a46b0463
09b216cf5e
84b34677e2
5a090bc3f3
4852e170ff
53bcc3649a
9241b11e1f
8e9e94de22
1607e4e0e1
ceffc7e1cf
e69b995a3b
f19b51c0b2
```
@@ -12,3 +12,7 @@ rustflags = ["-C", "link-arg=-fuse-ld=mold"]
[target.aarch64-unknown-linux-gnu]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=mold"]

# This cfg will reduce the size of `windows::core::Error` from 16 bytes to 4 bytes
[target.'cfg(target_os = "windows")']
rustflags = ["--cfg", "windows_slim_errors"]
```
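A `--cfg` flag injected through `rustflags`, like `windows_slim_errors` above, is visible to conditional compilation in every crate of the build. A minimal sketch of how code can branch on such a flag (the function and messages below are illustrative and not taken from this repository):

```rust
// Illustrative only: shows how a --cfg flag set via rustflags is consumed.
#[cfg(windows_slim_errors)]
fn report_error_repr() {
    println!("built with the slim windows error representation");
}

#[cfg(not(windows_slim_errors))]
fn report_error_repr() {
    println!("built with the default windows error representation");
}

fn main() {
    report_error_repr();
}
```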
.github/actions/run_tests/action.yml (2 changed lines, vendored)

```
@@ -10,7 +10,7 @@ runs:
cargo install cargo-nextest

- name: Install Node
uses: actions/setup-node@v4
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
with:
node-version: "18"
```
.github/workflows/bump_patch_version.yml (2 changed lines, vendored)

```
@@ -19,7 +19,7 @@ jobs:
- test
steps:
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
ref: ${{ github.event.inputs.branch }}
ssh-key: ${{ secrets.ZED_BOT_DEPLOY_KEY }}
```
.github/workflows/ci.yml (60 changed lines, vendored)

```
@@ -30,7 +30,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false
fetch-depth: 0
@@ -40,10 +40,15 @@ jobs:

- name: Check spelling
run: |
if ! which typos > /dev/null; then
cargo install typos-cli
if ! cargo install --list | grep "typos-cli v$TYPOS_CLI_VERSION" > /dev/null; then
echo "Installing typos-cli@$TYPOS_CLI_VERSION..."
cargo install "typos-cli@$TYPOS_CLI_VERSION"
else
echo "typos-cli@$TYPOS_CLI_VERSION is already installed."
fi
typos
env:
TYPOS_CLI_VERSION: "1.23.3"
```
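The spell-check step above now pins `typos-cli` to an exact version instead of installing whatever release happens to be latest, which keeps CI runs reproducible. A rough equivalent for checking a working copy locally, assuming the same pinned version as the workflow:

```sh
# Install the pinned release of typos-cli (version taken from the workflow env above).
cargo install typos-cli@1.23.3
# Run the spell checker from the repository root.
typos
```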
```
- name: Run style checks
uses: ./.github/actions/check_style
@@ -85,7 +90,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false

@@ -112,7 +117,7 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH

- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false

@@ -132,17 +137,18 @@ jobs:
runs-on: hosted-windows-1
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false

- name: Cache dependencies
uses: swatinem/rust-cache@v2
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}

- name: cargo clippy
run: ./script/clippy
# Windows can't run shell scripts, so we need to use `cargo xtask`.
run: cargo xtask clippy

- name: Build Zed
run: cargo build -p zed
@@ -165,12 +171,12 @@ jobs:
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Install Node
uses: actions/setup-node@v4
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
with:
node-version: "18"

- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
# We need to fetch more than one commit so that `script/draft-release-notes`
# is able to diff between the current and previous tag.
@@ -225,32 +231,34 @@ jobs:
mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg

- name: Upload app bundle (universal) to workflow run if main branch or specific label
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg
path: target/release/Zed.dmg
- name: Upload app bundle (aarch64) to workflow run if main branch or specific label
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg

- name: Upload app bundle (x86_64) to workflow run if main branch or specific label
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg

- uses: softprops/action-gh-release@v1
- uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
name: Upload app bundle to release
if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: |
target/zed-remote-server-macos-x86_64.gz
target/zed-remote-server-macos-aarch64.gz
target/aarch64-apple-darwin/release/Zed-aarch64.dmg
target/x86_64-apple-darwin/release/Zed-x86_64.dmg
target/release/Zed.dmg
@@ -273,7 +281,7 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH

- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false

@@ -311,18 +319,20 @@ jobs:
run: script/bundle-linux

- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz

- name: Upload app bundle to release
uses: softprops/action-gh-release@v1
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: target/release/zed-linux-x86_64.tar.gz
files: |
target/zed-remote-server-linux-x86_64.gz
target/release/zed-linux-x86_64.tar.gz
body: ""
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -338,11 +348,11 @@ jobs:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false
- name: "Setup jq"
uses: dcarbone/install-jq-action@v2
uses: dcarbone/install-jq-action@8867ddb4788346d7c22b72ea2e2ffe4d514c7bcb # v2

- name: Set up Clang
run: |
@@ -350,7 +360,7 @@ jobs:
sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH

- uses: rui314/setup-mold@v1
- uses: rui314/setup-mold@2e332a0b602c2fc65d2d3995941b1b29a5f554a0 # v1
with:
mold-version: 2.32.0

@@ -393,19 +403,21 @@ jobs:
run: script/bundle-linux

- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz

- name: Upload app bundle to release
uses: softprops/action-gh-release@v1
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: target/release/zed-linux-aarch64.tar.gz
files: |
target/zed-remote-server-linux-aarch64.gz
target/release/zed-linux-aarch64.tar.gz
body: ""
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
.github/workflows/danger.yml (4 changed lines, vendored)

```
@@ -14,14 +14,14 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4

- uses: pnpm/action-setup@v3
with:
version: 9

- name: Setup Node
uses: actions/setup-node@v4
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
with:
node-version: "20"
cache: "pnpm"
```
.github/workflows/deploy_cloudflare.yml (12 changed lines, vendored)

```
@@ -12,12 +12,12 @@ jobs:

steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false

- name: Setup mdBook
uses: peaceiris/actions-mdbook@v2
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2
with:
mdbook-version: "0.4.37"

@@ -28,28 +28,28 @@ jobs:
mdbook build ./docs --dest-dir=../target/deploy/docs/

- name: Deploy Docs
uses: cloudflare/wrangler-action@v3
uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: pages deploy target/deploy --project-name=docs

- name: Deploy Install
uses: cloudflare/wrangler-action@v3
uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh

- name: Deploy Docs Workers
uses: cloudflare/wrangler-action@v3
uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy .cloudflare/docs-proxy/src/worker.js

- name: Deploy Install Workers
uses: cloudflare/wrangler-action@v3
uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
```
.github/workflows/deploy_collab.yml (6 changed lines, vendored)

```
@@ -18,7 +18,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false
fetch-depth: 0
@@ -37,7 +37,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false
fetch-depth: 0
@@ -71,7 +71,7 @@ jobs:
run: doctl registry login

- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false
```
.github/workflows/publish_extension_cli.yml (4 changed lines, vendored)

```
@@ -16,12 +16,12 @@ jobs:
- ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false

- name: Cache dependencies
uses: swatinem/rust-cache@v2
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
```
.github/workflows/randomized_tests.yml (4 changed lines, vendored)

```
@@ -23,12 +23,12 @@ jobs:
- randomized-tests
steps:
- name: Install Node
uses: actions/setup-node@v4
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
with:
node-version: "18"

- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false
```
.github/workflows/release_actions.yml (4 changed lines, vendored)

```
@@ -16,7 +16,7 @@ jobs:
fi
echo "::set-output name=URL::$URL"
- name: Get content
uses: 2428392/gh-truncate-string-action@v1.3.0
uses: 2428392/gh-truncate-string-action@67b1b814955634208b103cff064be3cb1c7a19be # v1.3.0
id: get-content
with:
stringToTruncate: |
@@ -26,7 +26,7 @@ jobs:
maxLength: 2000
truncationSymbol: "..."
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v5.3.0
uses: tsickert/discord-webhook@c840d45a03a323fbc3f7507ac7769dbd91bfb164 # v5.3.0
with:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
content: ${{ steps.get-content.outputs.string }}
```
.github/workflows/release_nightly.yml (18 changed lines, vendored)

```
@@ -23,7 +23,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false
fetch-depth: 0
@@ -33,6 +33,7 @@ jobs:

- name: Run clippy
run: ./script/clippy

tests:
timeout-minutes: 60
name: Run tests
@@ -43,7 +44,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false

@@ -68,12 +69,12 @@ jobs:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Install Node
uses: actions/setup-node@v4
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
with:
node-version: "18"

- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false

@@ -107,7 +108,7 @@ jobs:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false

@@ -132,7 +133,6 @@ jobs:
name: Create a Linux *.tar.gz bundle for ARM
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- hosted-linux-arm-1
needs: tests
env:
@@ -141,12 +141,12 @@ jobs:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false

- name: "Setup jq"
uses: dcarbone/install-jq-action@v2
uses: dcarbone/install-jq-action@8867ddb4788346d7c22b72ea2e2ffe4d514c7bcb # v2

- name: Set up Clang
run: |
@@ -154,7 +154,7 @@ jobs:
sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH

- uses: rui314/setup-mold@v1
- uses: rui314/setup-mold@2e332a0b602c2fc65d2d3995941b1b29a5f554a0 # v1
with:
mold-version: 2.32.0
```
```
@@ -8,8 +8,8 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5
with:
python-version: "3.11"
architecture: "x64"

@@ -8,8 +8,8 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5
with:
python-version: "3.11"
architecture: "x64"
```
Cargo.lock (1961 changed lines, generated)

Cargo.toml (141 changed lines)
@@ -1,11 +1,11 @@
|
||||
[workspace]
|
||||
resolver = "2"
|
||||
members = [
|
||||
"crates/activity_indicator",
|
||||
"crates/anthropic",
|
||||
"crates/assets",
|
||||
"crates/assistant",
|
||||
"crates/assistant_slash_command",
|
||||
"crates/assistant_tooling",
|
||||
"crates/audio",
|
||||
"crates/auto_update",
|
||||
"crates/breadcrumbs",
|
||||
@@ -19,6 +19,7 @@ members = [
|
||||
"crates/collections",
|
||||
"crates/command_palette",
|
||||
"crates/command_palette_hooks",
|
||||
"crates/completion",
|
||||
"crates/copilot",
|
||||
"crates/db",
|
||||
"crates/dev_server_projects",
|
||||
@@ -43,13 +44,14 @@ members = [
|
||||
"crates/gpui_macros",
|
||||
"crates/headless",
|
||||
"crates/html_to_markdown",
|
||||
"crates/http",
|
||||
"crates/http_client",
|
||||
"crates/image_viewer",
|
||||
"crates/indexed_docs",
|
||||
"crates/inline_completion_button",
|
||||
"crates/install_cli",
|
||||
"crates/journal",
|
||||
"crates/language",
|
||||
"crates/language_model",
|
||||
"crates/language_selector",
|
||||
"crates/language_tools",
|
||||
"crates/languages",
|
||||
@@ -79,6 +81,8 @@ members = [
|
||||
"crates/refineable",
|
||||
"crates/refineable/derive_refineable",
|
||||
"crates/release_channel",
|
||||
"crates/remote",
|
||||
"crates/remote_server",
|
||||
"crates/repl",
|
||||
"crates/rich_text",
|
||||
"crates/rope",
|
||||
@@ -86,7 +90,9 @@ members = [
|
||||
"crates/search",
|
||||
"crates/semantic_index",
|
||||
"crates/semantic_version",
|
||||
"crates/session",
|
||||
"crates/settings",
|
||||
"crates/settings_ui",
|
||||
"crates/snippet",
|
||||
"crates/snippet_provider",
|
||||
"crates/sqlez",
|
||||
@@ -119,6 +125,10 @@ members = [
|
||||
"crates/zed",
|
||||
"crates/zed_actions",
|
||||
|
||||
#
|
||||
# Extensions
|
||||
#
|
||||
|
||||
"extensions/astro",
|
||||
"extensions/clojure",
|
||||
"extensions/csharp",
|
||||
@@ -137,6 +147,7 @@ members = [
|
||||
"extensions/php",
|
||||
"extensions/prisma",
|
||||
"extensions/purescript",
|
||||
"extensions/ruff",
|
||||
"extensions/ruby",
|
||||
"extensions/snippets",
|
||||
"extensions/svelte",
|
||||
@@ -147,19 +158,25 @@ members = [
|
||||
"extensions/vue",
|
||||
"extensions/zig",
|
||||
|
||||
#
|
||||
# Tooling
|
||||
#
|
||||
|
||||
"tooling/xtask",
|
||||
]
|
||||
default-members = ["crates/zed"]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.dependencies]
|
||||
#
|
||||
# Workspace member crates
|
||||
#
|
||||
|
||||
activity_indicator = { path = "crates/activity_indicator" }
|
||||
ai = { path = "crates/ai" }
|
||||
anthropic = { path = "crates/anthropic" }
|
||||
assets = { path = "crates/assets" }
|
||||
assistant = { path = "crates/assistant" }
|
||||
assistant_slash_command = { path = "crates/assistant_slash_command" }
|
||||
assistant_tooling = { path = "crates/assistant_tooling" }
|
||||
audio = { path = "crates/audio" }
|
||||
auto_update = { path = "crates/auto_update" }
|
||||
breadcrumbs = { path = "crates/breadcrumbs" }
|
||||
@@ -173,6 +190,7 @@ collab_ui = { path = "crates/collab_ui" }
|
||||
collections = { path = "crates/collections" }
|
||||
command_palette = { path = "crates/command_palette" }
|
||||
command_palette_hooks = { path = "crates/command_palette_hooks" }
|
||||
completion = { path = "crates/completion" }
|
||||
copilot = { path = "crates/copilot" }
|
||||
db = { path = "crates/db" }
|
||||
dev_server_projects = { path = "crates/dev_server_projects" }
|
||||
@@ -195,13 +213,14 @@ gpui = { path = "crates/gpui" }
|
||||
gpui_macros = { path = "crates/gpui_macros" }
|
||||
headless = { path = "crates/headless" }
|
||||
html_to_markdown = { path = "crates/html_to_markdown" }
|
||||
http = { path = "crates/http" }
|
||||
http_client = { path = "crates/http_client" }
|
||||
image_viewer = { path = "crates/image_viewer" }
|
||||
indexed_docs = { path = "crates/indexed_docs" }
|
||||
inline_completion_button = { path = "crates/inline_completion_button" }
|
||||
install_cli = { path = "crates/install_cli" }
|
||||
journal = { path = "crates/journal" }
|
||||
language = { path = "crates/language" }
|
||||
language_model = { path = "crates/language_model" }
|
||||
language_selector = { path = "crates/language_selector" }
|
||||
language_tools = { path = "crates/language_tools" }
|
||||
languages = { path = "crates/languages" }
|
||||
@@ -230,7 +249,10 @@ project_symbols = { path = "crates/project_symbols" }
|
||||
proto = { path = "crates/proto" }
|
||||
quick_action_bar = { path = "crates/quick_action_bar" }
|
||||
recent_projects = { path = "crates/recent_projects" }
|
||||
refineable = { path = "crates/refineable" }
|
||||
release_channel = { path = "crates/release_channel" }
|
||||
remote = { path = "crates/remote" }
|
||||
remote_server = { path = "crates/remote_server" }
|
||||
repl = { path = "crates/repl" }
|
||||
rich_text = { path = "crates/rich_text" }
|
||||
rope = { path = "crates/rope" }
|
||||
@@ -238,7 +260,9 @@ rpc = { path = "crates/rpc" }
|
||||
search = { path = "crates/search" }
|
||||
semantic_index = { path = "crates/semantic_index" }
|
||||
semantic_version = { path = "crates/semantic_version" }
|
||||
session = { path = "crates/session" }
|
||||
settings = { path = "crates/settings" }
|
||||
settings_ui = { path = "crates/settings_ui" }
|
||||
snippet = { path = "crates/snippet" }
|
||||
snippet_provider = { path = "crates/snippet_provider" }
|
||||
sqlez = { path = "crates/sqlez" }
|
||||
@@ -271,37 +295,44 @@ worktree = { path = "crates/worktree" }
|
||||
zed = { path = "crates/zed" }
|
||||
zed_actions = { path = "crates/zed_actions" }
|
||||
|
||||
#
|
||||
# External crates
|
||||
#
|
||||
|
||||
aho-corasick = "1.1"
|
||||
alacritty_terminal = "0.23"
|
||||
any_vec = "0.13"
|
||||
anyhow = "1.0.57"
|
||||
any_vec = "0.14"
|
||||
anyhow = "1.0.86"
|
||||
ashpd = "0.9.1"
|
||||
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
|
||||
async-dispatcher = { version = "0.1" }
|
||||
async-dispatcher = "0.1"
|
||||
async-fs = "1.6"
|
||||
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
|
||||
async-recursion = "1.0.0"
|
||||
async-tar = "0.4.2"
|
||||
async-trait = "0.1"
|
||||
async-tungstenite = "0.23"
|
||||
async-watch = "0.3.1"
|
||||
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
|
||||
base64 = "0.13"
|
||||
base64 = "0.22"
|
||||
bitflags = "2.6.0"
|
||||
blade-graphics = { git = "https://github.com/zed-industries/blade", rev = "a477c2008db27db0b9f745715e119b3ee7ab7818" }
|
||||
blade-macros = { git = "https://github.com/zed-industries/blade", rev = "a477c2008db27db0b9f745715e119b3ee7ab7818" }
|
||||
blade-util = { git = "https://github.com/zed-industries/blade", rev = "a477c2008db27db0b9f745715e119b3ee7ab7818" }
|
||||
cap-std = "3.0"
|
||||
blade-graphics = { git = "https://github.com/zed-industries/blade", rev = "7e497c534d5d4a30c18d9eb182cf39eaf0aaa25e" }
|
||||
blade-macros = { git = "https://github.com/zed-industries/blade", rev = "7e497c534d5d4a30c18d9eb182cf39eaf0aaa25e" }
|
||||
blade-util = { git = "https://github.com/zed-industries/blade", rev = "7e497c534d5d4a30c18d9eb182cf39eaf0aaa25e" }
|
||||
cargo_metadata = "0.18"
|
||||
cargo_toml = "0.20"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
clap = { version = "4.4", features = ["derive"] }
|
||||
clickhouse = { version = "0.11.6" }
|
||||
clickhouse = "0.11.6"
|
||||
cocoa = "0.25"
|
||||
core-foundation = { version = "0.9.3" }
|
||||
core-foundation = "0.9.3"
|
||||
core-foundation-sys = "0.8.6"
|
||||
ctor = "0.2.6"
|
||||
dashmap = "5.5.3"
|
||||
dashmap = "6.0"
|
||||
derive_more = "0.99.17"
|
||||
dirs = "4.0"
|
||||
emojis = "0.6.1"
|
||||
env_logger = "0.9"
|
||||
env_logger = "0.11"
|
||||
exec = "0.3.1"
|
||||
fork = "0.1.23"
|
||||
futures = "0.3"
|
||||
@@ -315,16 +346,17 @@ html5ever = "0.27.0"
|
||||
ignore = "0.4.22"
|
||||
image = "0.25.1"
|
||||
indexmap = { version = "1.6.2", features = ["serde"] }
|
||||
indoc = "1"
|
||||
indoc = "2"
|
||||
# We explicitly disable http2 support in isahc.
|
||||
isahc = { version = "1.7.2", default-features = false, features = [
|
||||
"text-decoding",
|
||||
] }
|
||||
itertools = "0.11.0"
|
||||
jsonwebtoken = "9.3"
|
||||
lazy_static = "1.4.0"
|
||||
libc = "0.2"
|
||||
linkify = "0.10.0"
|
||||
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
|
||||
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
|
||||
markup5ever_rcdom = "0.3.0"
|
||||
nanoid = "0.4"
|
||||
nix = "0.28"
|
||||
@@ -342,15 +374,16 @@ prost-build = "0.9"
|
||||
prost-types = "0.9"
|
||||
pulldown-cmark = { version = "0.10.0", default-features = false }
|
||||
rand = "0.8.5"
|
||||
refineable = { path = "./crates/refineable" }
|
||||
regex = "1.5"
|
||||
repair_json = "0.1.0"
|
||||
rsa = "0.9.6"
|
||||
runtimelib = { version = "0.12", default-features = false, features = [
|
||||
"async-dispatcher-runtime",
|
||||
] }
|
||||
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
|
||||
rustc-demangle = "0.1.23"
|
||||
rust-embed = { version = "8.4", features = ["include-exclude"] }
|
||||
schemars = "0.8"
|
||||
schemars = {version = "0.8", features = ["impl_json_schema"]}
|
||||
semver = "1.0"
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
|
||||
@@ -370,6 +403,7 @@ smallvec = { version = "1.6", features = ["union"] }
|
||||
smol = "1.2"
|
||||
strum = { version = "0.25.0", features = ["derive"] }
|
||||
subtle = "2.5.0"
|
||||
sys-locale = "0.3.1"
|
||||
sysinfo = "0.30.7"
|
||||
tempfile = "3.9.0"
|
||||
thiserror = "1.0.29"
|
||||
@@ -381,32 +415,32 @@ time = { version = "0.3", features = [
|
||||
"serde-well-known",
|
||||
"formatting",
|
||||
] }
|
||||
tiny_http = "0.8"
|
||||
toml = "0.8"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tower-http = "0.4.4"
|
||||
tree-sitter = { version = "0.20", features = ["wasm"] }
|
||||
tree-sitter-bash = "0.20.5"
|
||||
tree-sitter-c = "0.20.1"
|
||||
tree-sitter-cpp = "0.20.5"
|
||||
tree-sitter-css = "0.20"
|
||||
tree-sitter-elixir = "0.1.1"
|
||||
tree-sitter = { version = "0.22", features = ["wasm"] }
|
||||
tree-sitter-bash = "0.21"
|
||||
tree-sitter-c = "0.21"
|
||||
tree-sitter-cpp = "0.22"
|
||||
tree-sitter-css = "0.21"
|
||||
tree-sitter-elixir = "0.2"
|
||||
tree-sitter-embedded-template = "0.20.0"
|
||||
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "b82ab803d887002a0af11f6ce63d72884580bf33" }
|
||||
tree-sitter-gomod = "1.0.1"
|
||||
tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work" }
|
||||
rustc-demangle = "0.1.23"
|
||||
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
|
||||
tree-sitter-html = "0.19.0"
|
||||
tree-sitter-jsdoc = { git = "https://github.com/tree-sitter/tree-sitter-jsdoc", rev = "6a6cf9e7341af32d8e2b2e24a37fbfebefc3dc55" }
|
||||
tree-sitter-json = "0.20.2"
|
||||
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
|
||||
tree-sitter-proto = { git = "https://github.com/rewinfrey/tree-sitter-proto", rev = "36d54f288aee112f13a67b550ad32634d0c2cb52" }
|
||||
tree-sitter-python = "0.20.2"
|
||||
tree-sitter-regex = "0.20.0"
|
||||
tree-sitter-ruby = "0.20.0"
|
||||
tree-sitter-rust = "0.20.3"
|
||||
tree-sitter-typescript = "0.20.5"
|
||||
tree-sitter-yaml = "0.0.1"
|
||||
tree-sitter-go = "0.21"
|
||||
tree-sitter-go-mod = { git = "https://github.com/SomeoneToIgnore/tree-sitter-go-mod", rev = "8c1f54f12bb4c846336b634bc817645d6f35d641", package = "tree-sitter-gomod" }
|
||||
tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work", rev = "dcbabff454703c3a4bc98a23cf8778d4be46fd22" }
|
||||
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "6dd0303acf7138dd2b9b432a229e16539581c701" }
|
||||
tree-sitter-html = "0.20"
|
||||
tree-sitter-jsdoc = "0.21"
|
||||
tree-sitter-json = "0.21"
|
||||
tree-sitter-md = { git = "https://github.com/zed-industries/tree-sitter-markdown", rev = "e3855e37f8f2c71aa7513c18a9c95fb7461b1b10" }
|
||||
protols-tree-sitter-proto = "0.2"
|
||||
tree-sitter-python = "0.21"
|
||||
tree-sitter-regex = "0.21"
|
||||
tree-sitter-ruby = "0.21"
|
||||
tree-sitter-rust = "0.21"
|
||||
tree-sitter-typescript = "0.21"
|
||||
tree-sitter-yaml = "0.6"
|
||||
unindent = "0.1.7"
|
||||
unicase = "2.6"
|
||||
unicode-segmentation = "1.10"
|
||||
@@ -414,20 +448,19 @@ url = "2.2"
|
||||
uuid = { version = "1.1.2", features = ["v4", "v5", "serde"] }
|
||||
wasmparser = "0.201"
|
||||
wasm-encoder = "0.201"
|
||||
wasmtime = { version = "19.0.0", default-features = false, features = [
|
||||
wasmtime = { version = "21.0.1", default-features = false, features = [
|
||||
"async",
|
||||
"demangle",
|
||||
"runtime",
|
||||
"cranelift",
|
||||
"component-model",
|
||||
] }
|
||||
wasmtime-wasi = "19.0.0"
|
||||
wasmtime-wasi = "21.0.1"
|
||||
which = "6.0.0"
|
||||
wit-component = "0.201"
|
||||
sys-locale = "0.3.1"
|
||||
|
||||
[workspace.dependencies.windows]
|
||||
version = "0.57"
|
||||
version = "0.58"
|
||||
features = [
|
||||
"implement",
|
||||
"Foundation_Numerics",
|
||||
@@ -447,7 +480,6 @@ features = [
|
||||
"Win32_Security",
|
||||
"Win32_Security_Credentials",
|
||||
"Win32_Storage_FileSystem",
|
||||
"Win32_System_LibraryLoader",
|
||||
"Win32_System_Com",
|
||||
"Win32_System_Com_StructuredStorage",
|
||||
"Win32_System_DataExchange",
|
||||
@@ -457,7 +489,6 @@ features = [
|
||||
"Win32_System_SystemInformation",
|
||||
"Win32_System_SystemServices",
|
||||
"Win32_System_Threading",
|
||||
"Win32_System_Time",
|
||||
"Win32_System_WinRT",
|
||||
"Win32_UI_Controls",
|
||||
"Win32_UI_HiDpi",
|
||||
@@ -468,9 +499,8 @@ features = [
|
||||
]
|
||||
|
||||
[patch.crates-io]
|
||||
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "7b4894ba2ae81b988846676f54c0988d4027ef4f" }
|
||||
# Workaround for a broken nightly build of gpui: See #7644 and revisit once 0.5.3 is released.
|
||||
pathfinder_simd = { git = "https://github.com/servo/pathfinder.git", rev = "4968e819c0d9b015437ffc694511e175801a17c7" }
|
||||
# Patch Tree-sitter for updated wasmtime.
|
||||
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "7f4a57817d58a2f134fe863674acad6bbf007228" }
|
||||
|
||||
[profile.dev]
|
||||
split-debuginfo = "unpacked"
|
||||
@@ -523,13 +553,6 @@ single_range_in_vec_init = "allow"
|
||||
style = { level = "allow", priority = -1 }
|
||||
|
||||
# Individual rules that have violations in the codebase:
|
||||
almost_complete_range = "allow"
|
||||
arc_with_non_send_sync = "allow"
|
||||
borrowed_box = "allow"
|
||||
let_underscore_future = "allow"
|
||||
map_entry = "allow"
|
||||
non_canonical_partial_ord_impl = "allow"
|
||||
reversed_empty_ranges = "allow"
|
||||
type_complexity = "allow"
|
||||
|
||||
[workspace.metadata.cargo-machete]
|
||||
|
||||
```
@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2

FROM rust:1.79-bookworm as builder
FROM rust:1.80-bookworm as builder
WORKDIR app
COPY . .
```
1
assets/icons/eye.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-eye"><path d="M2.062 12.348a1 1 0 0 1 0-.696 10.75 10.75 0 0 1 19.876 0 1 1 0 0 1 0 .696 10.75 10.75 0 0 1-19.876 0"/><circle cx="12" cy="12" r="3"/></svg>
|
||||
|
After Width: | Height: | Size: 358 B |
1
assets/icons/file_code.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-file-code"><path d="M10 12.5 8 15l2 2.5"/><path d="m14 12.5 2 2.5-2 2.5"/><path d="M14 2v4a2 2 0 0 0 2 2h4"/><path d="M15 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V7z"/></svg>
|
||||
|
After Width: | Height: | Size: 388 B |
1
assets/icons/file_text.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-file-text"><path d="M15 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V7Z"/><path d="M14 2v4a2 2 0 0 0 2 2h4"/><path d="M10 9H8"/><path d="M16 13H8"/><path d="M16 17H8"/></svg>
|
||||
|
After Width: | Height: | Size: 384 B |
6
assets/icons/sliders-alt.svg
Normal file
@@ -0,0 +1,6 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3 4H8" stroke="black" stroke-width="1.75" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M6 10L11 10" stroke="black" stroke-width="1.75" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<circle cx="4" cy="10" r="1.875" stroke="black" stroke-width="1.75"/>
|
||||
<circle cx="10" cy="4" r="1.875" stroke="black" stroke-width="1.75"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 450 B |
3
assets/icons/sparkle_alt.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M6 6C5.69062 6.30938 4.56159 6.55977 3.51192 6.73263C3.27345 6.7719 3.27345 7.2281 3.51192 7.26737C4.56159 7.44023 5.69062 7.69062 6 8C6.30938 8.30938 6.55977 9.43841 6.73263 10.4881C6.7719 10.7266 7.2281 10.7266 7.26737 10.4881C7.44023 9.43841 7.69062 8.30938 8 8C8.30938 7.69062 9.43841 7.44023 10.4881 7.26737C10.7266 7.2281 10.7266 6.7719 10.4881 6.73263C9.43841 6.55977 8.30938 6.30938 8 6C7.69062 5.69062 7.44023 4.56159 7.26737 3.51192C7.2281 3.27345 6.7719 3.27345 6.73263 3.51192C6.55977 4.56159 6.30938 5.69062 6 6Z" stroke="black" stroke-width="1.25" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 700 B |
@@ -1,3 +1,3 @@
|
||||
<svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M9.88889 1H2.11111C1.49746 1 1 1.49746 1 2.11111V9.88889C1 10.5025 1.49746 11 2.11111 11H9.88889C10.5025 11 11 10.5025 11 9.88889V2.11111C11 1.49746 10.5025 1 9.88889 1Z" stroke="#C56757" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M4 9.8V4.2C4 4.08954 4.08954 4 4.2 4H9.8C9.91046 4 10 4.08954 10 4.2V9.8C10 9.91046 9.91046 10 9.8 10H4.2C4.08954 10 4 9.91046 4 9.8Z" stroke="#C56757" stroke-width="1.25" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 369 B After Width: | Height: | Size: 310 B |
@@ -40,7 +40,6 @@
|
||||
"backspace": "editor::Backspace",
|
||||
"shift-backspace": "editor::Backspace",
|
||||
"delete": "editor::Delete",
|
||||
"ctrl-d": "editor::Delete",
|
||||
"tab": "editor::Tab",
|
||||
"shift-tab": "editor::TabPrev",
|
||||
"ctrl-k": "editor::CutToEndOfLine",
|
||||
@@ -106,8 +105,8 @@
|
||||
"bindings": {
|
||||
"enter": "editor::Newline",
|
||||
"shift-enter": "editor::Newline",
|
||||
"ctrl-shift-enter": "editor::NewlineBelow",
|
||||
"ctrl-enter": "editor::NewlineAbove",
|
||||
"ctrl-shift-enter": "editor::NewlineBelow",
|
||||
"alt-z": "editor::ToggleSoftWrap",
|
||||
"ctrl-f": "buffer_search::Deploy",
|
||||
"ctrl-h": ["buffer_search::Deploy", { "replace_enabled": true }],
|
||||
@@ -205,7 +204,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProjectSearchBar && in_replace",
|
||||
"context": "ProjectSearchBar && in_replace > Editor",
|
||||
"bindings": {
|
||||
"enter": "search::ReplaceNext",
|
||||
"ctrl-alt-enter": "search::ReplaceAll"
|
||||
@@ -269,6 +268,7 @@
|
||||
"alt-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink Selection
|
||||
"ctrl-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
|
||||
"ctrl-f2": "editor::SelectAllMatches", // Select all occurrences of current word
|
||||
"ctrl-d": ["editor::SelectNext", { "replace_newest": false }],
|
||||
"ctrl-shift-down": ["editor::SelectNext", { "replace_newest": false }], // Add selection to Next Find Match
|
||||
"ctrl-shift-up": ["editor::SelectPrevious", { "replace_newest": false }],
|
||||
"ctrl-k ctrl-d": ["editor::SelectNext", { "replace_newest": true }],
|
||||
@@ -477,6 +477,12 @@
|
||||
"ctrl-enter": "assistant::InlineAssist"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && jupyter && !ContextEditor",
|
||||
"bindings": {
|
||||
"ctrl-shift-enter": "repl::Run"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ContextEditor > Editor",
|
||||
"bindings": {
|
||||
@@ -595,6 +601,7 @@
|
||||
"ctrl-alt-space": "terminal::ShowCharacterPalette",
|
||||
"shift-ctrl-c": "terminal::Copy",
|
||||
"ctrl-insert": "terminal::Copy",
|
||||
"ctrl-a": "editor::SelectAll",
|
||||
"shift-ctrl-v": "terminal::Paste",
|
||||
"shift-insert": "terminal::Paste",
|
||||
"ctrl-enter": "assistant::InlineAssist",
|
||||
|
||||
@@ -135,6 +135,7 @@
|
||||
"bindings": {
|
||||
"enter": "editor::Newline",
|
||||
"shift-enter": "editor::Newline",
|
||||
"cmd-enter": "editor::NewlineBelow",
|
||||
"cmd-shift-enter": "editor::NewlineAbove",
|
||||
"alt-z": "editor::ToggleSoftWrap",
|
||||
"cmd-f": "buffer_search::Deploy",
|
||||
@@ -146,12 +147,6 @@
|
||||
"cmd-alt-e": "editor::SelectEnclosingSymbol"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && mode == full && !jupyter",
|
||||
"bindings": {
|
||||
"cmd-enter": "editor::NewlineBelow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && mode == full && inline_completion",
|
||||
"bindings": {
|
||||
@@ -180,6 +175,12 @@
|
||||
"cmd-c": "markdown::Copy"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && jupyter && !ContextEditor",
|
||||
"bindings": {
|
||||
"ctrl-shift-enter": "repl::Run"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "AssistantPanel",
|
||||
"bindings": {
|
||||
@@ -255,7 +256,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProjectSearchBar && in_replace",
|
||||
"context": "ProjectSearchBar && in_replace > Editor",
|
||||
"bindings": {
|
||||
"enter": "search::ReplaceNext",
|
||||
"cmd-enter": "search::ReplaceAll"
|
||||
@@ -292,7 +293,6 @@
|
||||
"alt-cmd-c": "search::ToggleCaseSensitive",
|
||||
"alt-cmd-w": "search::ToggleWholeWord",
|
||||
"alt-cmd-f": "project_search::ToggleFilters",
|
||||
"alt-cmd-g": "search::ToggleRegex",
|
||||
"alt-cmd-x": "search::ToggleRegex"
|
||||
}
|
||||
},
|
||||
@@ -570,12 +570,6 @@
|
||||
"space": "project_panel::Open"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && jupyter && !ContextEditor",
|
||||
"bindings": {
|
||||
"cmd-enter": "repl::Run"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "CollabPanel && not_editing",
|
||||
"bindings": {
|
||||
@@ -628,6 +622,7 @@
|
||||
"ctrl-cmd-space": "terminal::ShowCharacterPalette",
|
||||
"cmd-c": "terminal::Copy",
|
||||
"cmd-v": "terminal::Paste",
|
||||
"cmd-a": "editor::SelectAll",
|
||||
"cmd-k": "terminal::Clear",
|
||||
"ctrl-enter": "assistant::InlineAssist",
|
||||
// Some nice conveniences
|
||||
|
||||
@@ -12,8 +12,8 @@
|
||||
{
|
||||
"context": "Editor",
|
||||
"bindings": {
|
||||
"ctrl-shift-up": "editor::AddSelectionAbove",
|
||||
"ctrl-shift-down": "editor::AddSelectionBelow",
|
||||
"ctrl-shift-up": "editor::MoveLineUp",
|
||||
"ctrl-shift-down": "editor::MoveLineDown",
|
||||
"ctrl-shift-m": "editor::SelectLargerSyntaxNode",
|
||||
"ctrl-shift-l": "editor::SplitSelectionIntoLines",
|
||||
"ctrl-shift-a": "editor::SelectLargerSyntaxNode",
|
||||
|
||||
@@ -14,6 +14,8 @@
|
||||
"bindings": {
|
||||
"ctrl-shift-up": "editor::AddSelectionAbove",
|
||||
"ctrl-shift-down": "editor::AddSelectionBelow",
|
||||
"cmd-ctrl-up": "editor::MoveLineUp",
|
||||
"cmd-ctrl-down": "editor::MoveLineDown",
|
||||
"cmd-shift-space": "editor::SelectAll",
|
||||
"ctrl-shift-m": "editor::SelectLargerSyntaxNode",
|
||||
"cmd-shift-l": "editor::SplitSelectionIntoLines",
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
"backspace": "vim::Backspace",
|
||||
"j": "vim::Down",
|
||||
"down": "vim::Down",
|
||||
"ctrl-j": "vim::Down",
|
||||
"enter": "vim::NextLineStart",
|
||||
"ctrl-m": "vim::NextLineStart",
|
||||
"+": "vim::NextLineStart",
|
||||
@@ -215,7 +216,7 @@
|
||||
"shift-d": "vim::DeleteToEndOfLine",
|
||||
"shift-j": "vim::JoinLines",
|
||||
"y": ["vim::PushOperator", "Yank"],
|
||||
"shift-y": "vim::YankLine",
|
||||
"shift-y": "vim::YankToEndOfLine",
|
||||
"i": "vim::InsertBefore",
|
||||
"shift-i": "vim::InsertFirstNonWhitespace",
|
||||
"a": "vim::InsertAfter",
|
||||
@@ -252,7 +253,7 @@
|
||||
"[ d": "editor::GoToPrevDiagnostic",
|
||||
"] c": "editor::GoToHunk",
|
||||
"[ c": "editor::GoToPrevHunk",
|
||||
"g c c": "vim::ToggleComments"
|
||||
"g c": ["vim::PushOperator", "ToggleComments"]
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -433,6 +434,12 @@
|
||||
"<": "vim::CurrentLine"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_operator == gc",
|
||||
"bindings": {
|
||||
"c": "vim::CurrentLine"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar && !in_replace",
|
||||
"bindings": {
|
||||
|
||||
241
assets/prompts/operations.md
Normal file
@@ -0,0 +1,241 @@
|
||||
Your task is to map a step from the conversation above to operations on symbols inside the provided source files.
|
||||
|
||||
Guidelines:
|
||||
- There's no need to describe *what* to do, just *where* to do it.
|
||||
- If creating a file, assume any subsequent updates are included at the time of creation.
|
||||
- Don't create and then update a file.
|
||||
- We'll create it in one shot.
|
||||
- Prefer updating symbols lower in the syntax tree if possible.
|
||||
- Never include operations on a parent symbol and one of its children in the same <operations> block.
|
||||
- Never nest an operation with another operation or include CDATA or other content. All operations are leaf nodes.
|
||||
- Include a description attribute for each operation with a brief, one-line description of the change to perform.
|
||||
- Descriptions are required for all operations except delete.
|
||||
- When generating multiple operations, ensure the descriptions are specific to each individual operation.
|
||||
- Avoid referring to the location in the description. Focus on the change to be made, not the location where it's made. That's implicit with the symbol you provide.
|
||||
- Don't generate multiple operations at the same location. Instead, combine them together in a single operation with a succinct combined description.
|
||||
|
||||
The available operation types are:
|
||||
|
||||
1. <update>: Modify an existing symbol in a file.
|
||||
2. <create_file>: Create a new file.
|
||||
3. <insert_sibling_after>: Add a new symbol as sibling after an existing symbol in a file.
|
||||
4. <append_child>: Add a new symbol as the last child of an existing symbol in a file.
|
||||
5. <prepend_child>: Add a new symbol as the first child of an existing symbol in a file.
|
||||
6. <delete>: Remove an existing symbol from a file. The `description` attribute is invalid for delete, but required for other ops.
|
||||
|
||||
All operations *require* a path.
|
||||
Operations that *require* a symbol: <update>, <insert_sibling_after>, <delete>
|
||||
Operations that don't allow a symbol: <create>
|
||||
Operations that have an *optional* symbol: <prepend_child>, <append_child>
|
||||
|
||||
Example 1:
|
||||
|
||||
User:
|
||||
```rs src/rectangle.rs
|
||||
struct Rectangle {
|
||||
width: f64,
|
||||
height: f64,
|
||||
}
|
||||
|
||||
impl Rectangle {
|
||||
fn new(width: f64, height: f64) -> Self {
|
||||
Rectangle { width, height }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Symbols for src/rectangle.rs:
|
||||
- struct Rectangle
|
||||
- impl Rectangle
|
||||
- impl Rectangle fn new
|
||||
|
||||
<step>Add new methods 'calculate_area' and 'calculate_perimeter' to the Rectangle struct</step>
|
||||
<step>Implement the 'Display' trait for the Rectangle struct</step>
|
||||
|
||||
What are the operations for the step: <step>Add a new method 'calculate_area' to the Rectangle struct</step>
|
||||
|
||||
Assistant (wrong):
|
||||
<operations>
|
||||
<append_child path="src/shapes.rs" symbol="impl Rectangle" description="Add calculate_area method" />
|
||||
<append_child path="src/shapes.rs" symbol="impl Rectangle" description="Add calculate_perimeter method" />
|
||||
</operations>
|
||||
|
||||
This demonstrates what NOT to do. NEVER append multiple children at the same location.
|
||||
|
||||
Assistant (corrected):
|
||||
<operations>
|
||||
<append_child path="src/shapes.rs" symbol="impl Rectangle" description="Add calculate area and perimeter methods" />
|
||||
</operations>
|
||||
|
||||
User:
|
||||
What are the operations for the step: <step>Implement the 'Display' trait for the Rectangle struct</step>
|
||||
|
||||
Assistant:
|
||||
<operations>
|
||||
<insert_sibling_after path="src/shapes.rs" symbol="impl Rectangle" description="Implement Display trait for Rectangle"/>
|
||||
</operations>
|
||||
|
||||
Example 2:
|
||||
|
||||
User:
|
||||
```rs src/user.rs
|
||||
struct User {
|
||||
pub name: String,
|
||||
age: u32,
|
||||
email: String,
|
||||
}
|
||||
|
||||
impl User {
|
||||
fn new(name: String, age: u32, email: String) -> Self {
|
||||
User { name, age, email }
|
||||
}
|
||||
|
||||
pub fn print_info(&self) {
|
||||
println!("Name: {}, Age: {}, Email: {}", self.name, self.age, self.email);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Symbols for src/user.rs:
|
||||
- struct User
|
||||
- struct User pub name
|
||||
- struct User age
|
||||
- struct User email
|
||||
- impl User
|
||||
- impl User fn new
|
||||
- impl User pub fn print_info
|
||||
|
||||
<step>Update the 'print_info' method to use formatted output</step>
|
||||
<step>Remove the 'email' field from the User struct</step>
|
||||
|
||||
What are the operations for the step: <step>Update the 'print_info' method to use formatted output</step>
|
||||
|
||||
Assistant:
|
||||
<operations>
|
||||
<update path="src/user.rs" symbol="impl User fn print_info" description="Use formatted output" />
|
||||
</operations>
|
||||
|
||||
User:
|
||||
What are the operations for the step: <step>Remove the 'email' field from the User struct</step>
|
||||
|
||||
Assistant:
|
||||
<operations>
|
||||
<delete path="src/user.rs" symbol="struct User email" description="Remove the email field" />
|
||||
</operations>
|
||||
|
||||
Example 3:

User:
```rs src/vehicle.rs
struct Vehicle {
    make: String,
    model: String,
    year: u32,
}

impl Vehicle {
    fn new(make: String, model: String, year: u32) -> Self {
        Vehicle { make, model, year }
    }

    fn print_year(&self) {
        println!("Year: {}", self.year);
    }
}
```

Symbols for src/vehicle.rs:
- struct Vehicle
- struct Vehicle make
- struct Vehicle model
- struct Vehicle year
- impl Vehicle
- impl Vehicle fn new
- impl Vehicle fn print_year

<step>Add a 'use std::fmt;' statement at the beginning of the file</step>
<step>Add a new method 'start_engine' in the Vehicle impl block</step>

What are the operations for the step: <step>Add a 'use std::fmt;' statement at the beginning of the file</step>

Assistant:
<operations>
<prepend_child path="src/vehicle.rs" description="Add 'use std::fmt' statement" />
</operations>

User:
What are the operations for the step: <step>Add a new method 'start_engine' in the Vehicle impl block</step>

Assistant:
<operations>
<insert_sibling_after path="src/vehicle.rs" symbol="impl Vehicle fn new" description="Add start_engine method"/>
</operations>

Example 4:

User:
```rs src/employee.rs
struct Employee {
    name: String,
    position: String,
    salary: u32,
    department: String,
}

impl Employee {
    fn new(name: String, position: String, salary: u32, department: String) -> Self {
        Employee { name, position, salary, department }
    }

    fn print_details(&self) {
        println!("Name: {}, Position: {}, Salary: {}, Department: {}",
            self.name, self.position, self.salary, self.department);
    }

    fn give_raise(&mut self, amount: u32) {
        self.salary += amount;
    }
}
```

Symbols for src/employee.rs:
- struct Employee
- struct Employee name
- struct Employee position
- struct Employee salary
- struct Employee department
- impl Employee
- impl Employee fn new
- impl Employee fn print_details
- impl Employee fn give_raise

<step>Make salary an f32</step>

What are the operations for the step: <step>Make salary an f32</step>

Assistant (wrong):
<operations>
<update path="src/employee.rs" symbol="struct Employee" description="Change the type of salary to an f32" />
<update path="src/employee.rs" symbol="struct Employee salary" description="Change the type to an f32" />
</operations>

This example demonstrates what not to do: `struct Employee salary` is a child of `struct Employee`, so NEVER emit operations for both a symbol and one of its children. Updating the child symbol alone is enough.

Assistant (corrected):
<operations>
<update path="src/employee.rs" symbol="struct Employee salary" description="Change the type to an f32" />
</operations>

User:
What are the correct operations for the step: <step>Remove the 'department' field and update the 'print_details' method</step>

Assistant:
<operations>
<delete path="src/employee.rs" symbol="struct Employee department" />
<update path="src/employee.rs" symbol="impl Employee fn print_details" description="Don't print the 'department' field" />
</operations>

Now generate the operations for the following step.
Output only valid XML containing valid operations with their required attributes.
NEVER output code or any other text inside <operation> tags. If you do, you will be replaced with another model.
Your response *MUST* begin with <operations> and end with </operations>:
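The prompt above asks the model to reply with nothing but an <operations> block, and the Cargo.toml hunk further below adds `roxmltree` to the assistant crate, presumably for parsing such a reply. A minimal sketch of that parsing, assuming the element and attribute names shown in the examples above; the `Operation` struct and `parse_operations` helper are hypothetical and not part of this diff:

```rs
use anyhow::{anyhow, Result};

/// Hypothetical in-memory form of one parsed operation.
#[derive(Debug)]
struct Operation {
    kind: String,                // e.g. "update", "delete", "append_child"
    path: String,                // file the operation applies to
    symbol: Option<String>,      // target symbol, when present
    description: Option<String>, // human-readable description, when present
}

/// Parse an `<operations>` reply like the ones in the examples above.
fn parse_operations(xml: &str) -> Result<Vec<Operation>> {
    let doc = roxmltree::Document::parse(xml)?;
    let root = doc.root_element();
    if root.tag_name().name() != "operations" {
        return Err(anyhow!("expected an <operations> root element"));
    }
    root.children()
        .filter(|node| node.is_element())
        .map(|node| {
            let kind = node.tag_name().name().to_string();
            let path = node
                .attribute("path")
                .ok_or_else(|| anyhow!("<{kind}> is missing the `path` attribute"))?
                .to_string();
            Ok(Operation {
                kind,
                path,
                symbol: node.attribute("symbol").map(str::to_string),
                description: node.attribute("description").map(str::to_string),
            })
        })
        .collect()
}
```

Whether the branch actually consumes the reply this way is not shown here; the sketch only illustrates that the attributes used in the examples (path, symbol, description) are enough to recover each operation.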
@@ -26,6 +26,9 @@
|
||||
},
|
||||
// The name of a font to use for rendering text in the editor
|
||||
"buffer_font_family": "Zed Plex Mono",
|
||||
// Set the buffer text's font fallbacks, this will be merged with
|
||||
// the platform's default fallbacks.
|
||||
"buffer_font_fallbacks": [],
|
||||
// The OpenType features to enable for text in the editor.
|
||||
"buffer_font_features": {
|
||||
// Disable ligatures:
|
||||
@@ -47,8 +50,11 @@
|
||||
// },
|
||||
"buffer_line_height": "comfortable",
|
||||
// The name of a font to use for rendering text in the UI
|
||||
// (On macOS) You can set this to ".SystemUIFont" to use the system font
|
||||
// You can set this to ".SystemUIFont" to use the system font
|
||||
"ui_font_family": "Zed Plex Sans",
|
||||
// Set the UI's font fallbacks, this will be merged with the platform's
|
||||
// default font fallbacks.
|
||||
"ui_font_fallbacks": [],
|
||||
// The OpenType features to enable for text in the UI
|
||||
"ui_font_features": {
|
||||
// Disable ligatures:
|
||||
@@ -82,7 +88,7 @@
|
||||
// Whether to confirm before quitting Zed.
|
||||
"confirm_quit": false,
|
||||
// Whether to restore last closed project when fresh Zed instance is opened.
|
||||
"restore_on_startup": "last_workspace",
|
||||
"restore_on_startup": "last_session",
|
||||
// Size of the drop target in the editor.
|
||||
"drop_target_size": 0.2,
|
||||
// Whether the window should be closed when using 'close active item' on a window with no tabs.
|
||||
@@ -312,7 +318,7 @@
|
||||
"auto_reveal_entries": true,
|
||||
// Whether to fold directories automatically and show compact folders
|
||||
// (e.g. "a/b/c" ) when a directory has only one subdirectory inside.
|
||||
"auto_fold_dirs": false,
|
||||
"auto_fold_dirs": true,
|
||||
/// Scrollbar-related settings
|
||||
"scrollbar": {
|
||||
/// When to show the scrollbar in the project panel.
|
||||
@@ -375,7 +381,7 @@
|
||||
},
|
||||
"assistant": {
|
||||
// Version of this setting.
|
||||
"version": "1",
|
||||
"version": "2",
|
||||
// Whether the assistant is enabled.
|
||||
"enabled": true,
|
||||
// Whether to show the assistant panel button in the status bar.
|
||||
@@ -386,17 +392,12 @@
|
||||
"default_width": 640,
|
||||
// Default height when the assistant is docked to the bottom.
|
||||
"default_height": 320,
|
||||
// AI provider.
|
||||
"provider": {
|
||||
"name": "openai",
|
||||
// The default model to use when creating new contexts. This
|
||||
// setting can take three values:
|
||||
//
|
||||
// 1. "gpt-3.5-turbo"
|
||||
// 2. "gpt-4"
|
||||
// 3. "gpt-4-turbo-preview"
|
||||
// 4. "gpt-4o"
|
||||
"default_model": "gpt-4o"
|
||||
// The default model to use when creating new contexts.
|
||||
"default_model": {
|
||||
// The provider to use.
|
||||
"provider": "openai",
|
||||
// The model to use.
|
||||
"model": "gpt-4o"
|
||||
}
|
||||
},
|
||||
// Whether the screen sharing icon is shown in the os status bar.
|
||||
@@ -680,6 +681,10 @@
|
||||
// Set the terminal's font family. If this option is not included,
|
||||
// the terminal will default to matching the buffer's font family.
|
||||
// "font_family": "Zed Plex Mono",
|
||||
// Set the terminal's font fallbacks. If this option is not included,
|
||||
// the terminal will default to matching the buffer's font fallbacks.
|
||||
// This will be merged with the platform's default font fallbacks
|
||||
// "font_fallbacks": ["FiraCode Nerd Fonts"],
|
||||
// Sets the maximum number of lines in the terminal's scrollback buffer.
|
||||
// Default: 10_000, maximum: 100_000 (all bigger values set will be treated as 100_000), 0 disables the scrolling.
|
||||
// Existing terminals will not pick up this change until they are recreated.
|
||||
@@ -704,7 +709,7 @@
|
||||
//
|
||||
"file_types": {
|
||||
"JSON": ["flake.lock"],
|
||||
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json"]
|
||||
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "tsconfig.json"]
|
||||
},
|
||||
// The extensions that Zed should automatically install on startup.
|
||||
//
|
||||
@@ -742,6 +747,9 @@
|
||||
"Elixir": {
|
||||
"language_servers": ["elixir-ls", "!next-ls", "!lexical", "..."]
|
||||
},
|
||||
"Erlang": {
|
||||
"language_servers": ["erlang-ls", "!elp", "..."]
|
||||
},
|
||||
"Go": {
|
||||
"code_actions_on_format": {
|
||||
"source.organizeImports": true
|
||||
@@ -798,7 +806,7 @@
|
||||
}
|
||||
},
|
||||
"Ruby": {
|
||||
"language_servers": ["solargraph", "!ruby-lsp", "..."]
|
||||
"language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "..."]
|
||||
},
|
||||
"SCSS": {
|
||||
"prettier": {
|
||||
@@ -811,6 +819,9 @@
|
||||
"plugins": ["prettier-plugin-sql"]
|
||||
}
|
||||
},
|
||||
"Starlark": {
|
||||
"language_servers": ["starpls", "!buck2-lsp", "..."]
|
||||
},
|
||||
"Svelte": {
|
||||
"prettier": {
|
||||
"allowed": true,
|
||||
@@ -851,6 +862,18 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
// Different settings for specific language models.
|
||||
"language_models": {
|
||||
"anthropic": {
|
||||
"api_url": "https://api.anthropic.com"
|
||||
},
|
||||
"openai": {
|
||||
"api_url": "https://api.openai.com/v1"
|
||||
},
|
||||
"ollama": {
|
||||
"api_url": "http://localhost:11434"
|
||||
}
|
||||
},
|
||||
// Zed's Prettier integration settings.
|
||||
// Allows to enable/disable formatting with Prettier
|
||||
// and configure default Prettier, used when no project-level Prettier installation is found.
|
||||
@@ -883,6 +906,15 @@
|
||||
// }
|
||||
// }
|
||||
},
|
||||
// Jupyter settings
|
||||
"jupyter": {
|
||||
"enabled": true
|
||||
// Specify the language name as the key and the kernel name as the value.
|
||||
// "kernel_selections": {
|
||||
// "python": "conda-base"
|
||||
// "typescript": "deno"
|
||||
// }
|
||||
},
|
||||
// Vim settings
|
||||
"vim": {
|
||||
"use_system_clipboard": "always",
|
||||
@@ -935,5 +967,29 @@
|
||||
// Examples:
|
||||
// - "proxy": "socks5://localhost:10808"
|
||||
// - "proxy": "http://127.0.0.1:10809"
|
||||
"proxy": null
|
||||
"proxy": null,
|
||||
// Set to configure aliases for the command palette.
|
||||
// When typing a query which is a key of this object, the value will be used instead.
|
||||
//
|
||||
// Examples:
|
||||
// {
|
||||
// "W": "workspace::Save"
|
||||
// }
|
||||
"command_aliases": {},
|
||||
// ssh_connections is an array of ssh connections.
|
||||
// By default this setting is null, which disables the direct ssh connection support.
|
||||
// You can configure these from `project: Open Remote` in the command palette.
|
||||
// Zed's ssh support will pull configuration from your ~/.ssh too.
|
||||
// Examples:
|
||||
// [
|
||||
// {
|
||||
// "host": "example-box",
|
||||
// "projects": [
|
||||
// {
|
||||
// "paths": ["/home/user/code/zed"]
|
||||
// }
|
||||
// ]
|
||||
// }
|
||||
// ]
|
||||
"ssh_connections": null
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// Folder-specific settings
|
||||
//
|
||||
// For a full list of overridable settings, and general information on folder-specific settings,
|
||||
// see the documentation: https://zed.dev/docs/configuring-zed#folder-specific-settings
|
||||
// see the documentation: https://zed.dev/docs/configuring-zed#settings-files
|
||||
{}
|
||||
|
||||
@@ -17,6 +17,27 @@
|
||||
// What to do with the terminal pane and tab, after the command was started:
|
||||
// * `always` — always show the terminal pane, add and focus the corresponding task's tab in it (default)
|
||||
// * `never` — avoid changing current terminal pane focus, but still add/reuse the task's tab there
|
||||
"reveal": "always"
|
||||
"reveal": "always",
|
||||
// What to do with the terminal pane and tab, after the command had finished:
|
||||
// * `never` — Do nothing when the command finishes (default)
|
||||
// * `always` — always hide the terminal tab, hide the pane also if it was the last tab in it
|
||||
// * `on_success` — hide the terminal tab on task success only, otherwise behaves similar to `always`
|
||||
"hide": "never",
|
||||
// Which shell to use when running a task inside the terminal.
|
||||
// May take 3 values:
|
||||
// 1. (default) Use the system's default terminal configuration in /etc/passwd
|
||||
// "shell": "system"
|
||||
// 2. A program:
|
||||
// "shell": {
|
||||
// "program": "sh"
|
||||
// }
|
||||
// 3. A program with arguments:
|
||||
// "shell": {
|
||||
// "with_arguments": {
|
||||
// "program": "/bin/bash",
|
||||
// "arguments": ["--login"]
|
||||
// }
|
||||
// }
|
||||
"shell": "system"
|
||||
}
|
||||
]
|
||||
|
||||
@@ -107,6 +107,7 @@ impl ActivityIndicator {
|
||||
Editor::for_buffer(buffer, Some(project.clone()), cx)
|
||||
})),
|
||||
None,
|
||||
true,
|
||||
cx,
|
||||
);
|
||||
})?;
|
||||
|
||||
@@ -18,7 +18,7 @@ path = "src/anthropic.rs"
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
futures.workspace = true
|
||||
http.workspace = true
|
||||
http_client.workspace = true
|
||||
isahc.workspace = true
|
||||
schemars = { workspace = true, optional = true }
|
||||
serde.workspace = true
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt};
|
||||
use http::{AsyncBody, HttpClient, Method, Request as HttpRequest};
|
||||
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
|
||||
use isahc::config::Configurable;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{convert::TryFrom, time::Duration};
|
||||
@@ -20,6 +20,8 @@ pub enum Model {
|
||||
Claude3Sonnet,
|
||||
#[serde(alias = "claude-3-haiku", rename = "claude-3-haiku-20240307")]
|
||||
Claude3Haiku,
|
||||
#[serde(rename = "custom")]
|
||||
Custom { name: String, max_tokens: usize },
|
||||
}
|
||||
|
||||
impl Model {
|
||||
@@ -33,30 +35,38 @@ impl Model {
|
||||
} else if id.starts_with("claude-3-haiku") {
|
||||
Ok(Self::Claude3Haiku)
|
||||
} else {
|
||||
Err(anyhow!("Invalid model id: {}", id))
|
||||
Err(anyhow!("invalid model id"))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn id(&self) -> &'static str {
|
||||
pub fn id(&self) -> &str {
|
||||
match self {
|
||||
Model::Claude3_5Sonnet => "claude-3-5-sonnet-20240620",
|
||||
Model::Claude3Opus => "claude-3-opus-20240229",
|
||||
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
|
||||
Model::Claude3Haiku => "claude-3-opus-20240307",
|
||||
Self::Custom { name, .. } => name,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn display_name(&self) -> &'static str {
|
||||
pub fn display_name(&self) -> &str {
|
||||
match self {
|
||||
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
|
||||
Self::Claude3Opus => "Claude 3 Opus",
|
||||
Self::Claude3Sonnet => "Claude 3 Sonnet",
|
||||
Self::Claude3Haiku => "Claude 3 Haiku",
|
||||
Self::Custom { name, .. } => name,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn max_token_count(&self) -> usize {
|
||||
200_000
|
||||
match self {
|
||||
Self::Claude3_5Sonnet
|
||||
| Self::Claude3Opus
|
||||
| Self::Claude3Sonnet
|
||||
| Self::Claude3Haiku => 200_000,
|
||||
Self::Custom { max_tokens, .. } => *max_tokens,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -90,7 +100,7 @@ impl From<Role> for String {
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct Request {
|
||||
pub model: Model,
|
||||
pub model: String,
|
||||
pub messages: Vec<RequestMessage>,
|
||||
pub stream: bool,
|
||||
pub system: String,
|
||||
|
||||
@@ -23,15 +23,16 @@ test-support = [
|
||||
[dependencies]
|
||||
anthropic = { workspace = true, features = ["schemars"] }
|
||||
anyhow.workspace = true
|
||||
assets.workspace = true
|
||||
assistant_slash_command.workspace = true
|
||||
async-watch.workspace = true
|
||||
breadcrumbs.workspace = true
|
||||
cargo_toml.workspace = true
|
||||
chrono.workspace = true
|
||||
client.workspace = true
|
||||
clock.workspace = true
|
||||
collections.workspace = true
|
||||
command_palette_hooks.workspace = true
|
||||
completion.workspace = true
|
||||
editor.workspace = true
|
||||
feature_flags.workspace = true
|
||||
fs.workspace = true
|
||||
@@ -40,10 +41,11 @@ fuzzy.workspace = true
|
||||
gpui.workspace = true
|
||||
heed.workspace = true
|
||||
html_to_markdown.workspace = true
|
||||
http.workspace = true
|
||||
http_client.workspace = true
|
||||
indexed_docs.workspace = true
|
||||
indoc.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
log.workspace = true
|
||||
menu.workspace = true
|
||||
multi_buffer.workspace = true
|
||||
@@ -63,21 +65,20 @@ serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
similar.workspace = true
|
||||
smol.workspace = true
|
||||
strsim = "0.11"
|
||||
strum.workspace = true
|
||||
telemetry_events.workspace = true
|
||||
terminal.workspace = true
|
||||
terminal_view.workspace = true
|
||||
theme.workspace = true
|
||||
tiktoken-rs.workspace = true
|
||||
toml.workspace = true
|
||||
ui.workspace = true
|
||||
util.workspace = true
|
||||
uuid.workspace = true
|
||||
workspace.workspace = true
|
||||
picker.workspace = true
|
||||
roxmltree = "0.20.0"
|
||||
|
||||
[dev-dependencies]
|
||||
completion = { workspace = true, features = ["test-support"] }
|
||||
ctor.workspace = true
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
env_logger.workspace = true
|
||||
|
||||
@@ -38,7 +38,7 @@ Considering these aspects will ensure our conversation view design is optimized
|
||||
|
||||
@nate> 2 feels like it isn't important at the moment, we can explore that later. Let's start with 4, which I think will lead us to discussion 3 and 5.
|
||||
|
||||
#zed share your thoughts on the points we need to consider to design a layout and visualization for a conversation view between you (#zed) and multuple peoople, or between multiple people and multiple bots (you and other bots).
|
||||
#zed share your thoughts on the points we need to consider to design a layout and visualization for a conversation view between you (#zed) and multiple people, or between multiple people and multiple bots (you and other bots).
|
||||
|
||||
@nathan> Agreed. I'm interested in threading I think more than anything. Or 4 yeah. I think we need to scope the threading conversation. Also, asking #zed to propose the solution... not sure it will be that effective but it's worth a try...
|
||||
|
||||
|
||||
@@ -1,42 +1,40 @@
|
||||
pub mod assistant_panel;
|
||||
pub mod assistant_settings;
|
||||
mod completion_provider;
|
||||
mod context;
|
||||
pub mod context_store;
|
||||
mod inline_assistant;
|
||||
mod model_selector;
|
||||
mod prompt_library;
|
||||
mod prompts;
|
||||
mod search;
|
||||
mod slash_command;
|
||||
mod streaming_diff;
|
||||
mod terminal_inline_assistant;
|
||||
|
||||
pub use assistant_panel::{AssistantPanel, AssistantPanelEvent};
|
||||
use assistant_settings::{AnthropicModel, AssistantSettings, CloudModel, OllamaModel, OpenAiModel};
|
||||
use assistant_settings::AssistantSettings;
|
||||
use assistant_slash_command::SlashCommandRegistry;
|
||||
use client::{proto, Client};
|
||||
use command_palette_hooks::CommandPaletteFilter;
|
||||
pub use completion_provider::*;
|
||||
use completion::LanguageModelCompletionProvider;
|
||||
pub use context::*;
|
||||
pub use context_store::*;
|
||||
use fs::Fs;
|
||||
use gpui::{actions, AppContext, Global, SharedString, UpdateGlobal};
|
||||
use gpui::{actions, impl_actions, AppContext, Global, SharedString, UpdateGlobal};
|
||||
use indexed_docs::IndexedDocsRegistry;
|
||||
pub(crate) use inline_assistant::*;
|
||||
use language_model::{
|
||||
LanguageModelId, LanguageModelProviderId, LanguageModelRegistry, LanguageModelResponseMessage,
|
||||
};
|
||||
pub(crate) use model_selector::*;
|
||||
use semantic_index::{CloudEmbeddingProvider, SemanticIndex};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsStore};
|
||||
use settings::{update_settings_file, Settings, SettingsStore};
|
||||
use slash_command::{
|
||||
active_command, default_command, diagnostics_command, docs_command, fetch_command,
|
||||
file_command, now_command, project_command, prompt_command, search_command, symbols_command,
|
||||
tabs_command, term_command,
|
||||
};
|
||||
use std::{
|
||||
fmt::{self, Display},
|
||||
sync::Arc,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
pub(crate) use streaming_diff::*;
|
||||
|
||||
actions!(
|
||||
@@ -49,16 +47,22 @@ actions!(
|
||||
InsertIntoEditor,
|
||||
ToggleFocus,
|
||||
ResetKey,
|
||||
InlineAssist,
|
||||
InsertActivePrompt,
|
||||
DeployHistory,
|
||||
DeployPromptLibrary,
|
||||
ApplyEdit,
|
||||
ConfirmCommand,
|
||||
ToggleModelSelector
|
||||
ToggleModelSelector,
|
||||
DebugEditSteps
|
||||
]
|
||||
);
|
||||
|
||||
#[derive(Clone, Default, Deserialize, PartialEq)]
|
||||
pub struct InlineAssist {
|
||||
prompt: Option<String>,
|
||||
}
|
||||
|
||||
impl_actions!(assistant, [InlineAssist]);
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
|
||||
pub struct MessageId(clock::Lamport);
|
||||
|
||||
@@ -68,166 +72,6 @@ impl MessageId {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum Role {
|
||||
User,
|
||||
Assistant,
|
||||
System,
|
||||
}
|
||||
|
||||
impl Role {
|
||||
pub fn from_proto(role: i32) -> Role {
|
||||
match proto::LanguageModelRole::from_i32(role) {
|
||||
Some(proto::LanguageModelRole::LanguageModelUser) => Role::User,
|
||||
Some(proto::LanguageModelRole::LanguageModelAssistant) => Role::Assistant,
|
||||
Some(proto::LanguageModelRole::LanguageModelSystem) => Role::System,
|
||||
Some(proto::LanguageModelRole::LanguageModelTool) => Role::System,
|
||||
None => Role::User,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_proto(&self) -> proto::LanguageModelRole {
|
||||
match self {
|
||||
Role::User => proto::LanguageModelRole::LanguageModelUser,
|
||||
Role::Assistant => proto::LanguageModelRole::LanguageModelAssistant,
|
||||
Role::System => proto::LanguageModelRole::LanguageModelSystem,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cycle(self) -> Role {
|
||||
match self {
|
||||
Role::User => Role::Assistant,
|
||||
Role::Assistant => Role::System,
|
||||
Role::System => Role::User,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Role {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Role::User => write!(f, "user"),
|
||||
Role::Assistant => write!(f, "assistant"),
|
||||
Role::System => write!(f, "system"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
|
||||
pub enum LanguageModel {
|
||||
Cloud(CloudModel),
|
||||
OpenAi(OpenAiModel),
|
||||
Anthropic(AnthropicModel),
|
||||
Ollama(OllamaModel),
|
||||
}
|
||||
|
||||
impl Default for LanguageModel {
|
||||
fn default() -> Self {
|
||||
LanguageModel::Cloud(CloudModel::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageModel {
|
||||
pub fn telemetry_id(&self) -> String {
|
||||
match self {
|
||||
LanguageModel::OpenAi(model) => format!("openai/{}", model.id()),
|
||||
LanguageModel::Anthropic(model) => format!("anthropic/{}", model.id()),
|
||||
LanguageModel::Cloud(model) => format!("zed.dev/{}", model.id()),
|
||||
LanguageModel::Ollama(model) => format!("ollama/{}", model.id()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn display_name(&self) -> String {
|
||||
match self {
|
||||
LanguageModel::OpenAi(model) => model.display_name().into(),
|
||||
LanguageModel::Anthropic(model) => model.display_name().into(),
|
||||
LanguageModel::Cloud(model) => model.display_name().into(),
|
||||
LanguageModel::Ollama(model) => model.display_name().into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn max_token_count(&self) -> usize {
|
||||
match self {
|
||||
LanguageModel::OpenAi(model) => model.max_token_count(),
|
||||
LanguageModel::Anthropic(model) => model.max_token_count(),
|
||||
LanguageModel::Cloud(model) => model.max_token_count(),
|
||||
LanguageModel::Ollama(model) => model.max_token_count(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn id(&self) -> &str {
|
||||
match self {
|
||||
LanguageModel::OpenAi(model) => model.id(),
|
||||
LanguageModel::Anthropic(model) => model.id(),
|
||||
LanguageModel::Cloud(model) => model.id(),
|
||||
LanguageModel::Ollama(model) => model.id(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
pub struct LanguageModelRequestMessage {
|
||||
pub role: Role,
|
||||
pub content: String,
|
||||
}
|
||||
|
||||
impl LanguageModelRequestMessage {
|
||||
pub fn to_proto(&self) -> proto::LanguageModelRequestMessage {
|
||||
proto::LanguageModelRequestMessage {
|
||||
role: self.role.to_proto() as i32,
|
||||
content: self.content.clone(),
|
||||
tool_calls: Vec::new(),
|
||||
tool_call_id: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Serialize, Deserialize)]
|
||||
pub struct LanguageModelRequest {
|
||||
pub model: LanguageModel,
|
||||
pub messages: Vec<LanguageModelRequestMessage>,
|
||||
pub stop: Vec<String>,
|
||||
pub temperature: f32,
|
||||
}
|
||||
|
||||
impl LanguageModelRequest {
|
||||
pub fn to_proto(&self) -> proto::CompleteWithLanguageModel {
|
||||
proto::CompleteWithLanguageModel {
|
||||
model: self.model.id().to_string(),
|
||||
messages: self.messages.iter().map(|m| m.to_proto()).collect(),
|
||||
stop: self.stop.clone(),
|
||||
temperature: self.temperature,
|
||||
tool_choice: None,
|
||||
tools: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Before we send the request to the server, we can perform fixups on it appropriate to the model.
|
||||
pub fn preprocess(&mut self) {
|
||||
match &self.model {
|
||||
LanguageModel::OpenAi(_) => {}
|
||||
LanguageModel::Anthropic(_) => {}
|
||||
LanguageModel::Ollama(_) => {}
|
||||
LanguageModel::Cloud(model) => match model {
|
||||
CloudModel::Claude3Opus
|
||||
| CloudModel::Claude3Sonnet
|
||||
| CloudModel::Claude3Haiku
|
||||
| CloudModel::Claude3_5Sonnet => {
|
||||
preprocess_anthropic_request(self);
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
pub struct LanguageModelResponseMessage {
|
||||
pub role: Option<Role>,
|
||||
pub content: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub struct LanguageModelUsage {
|
||||
pub prompt_tokens: u32,
|
||||
@@ -321,6 +165,16 @@ pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) {
|
||||
cx.set_global(Assistant::default());
|
||||
AssistantSettings::register(cx);
|
||||
|
||||
// TODO: remove this when 0.148.0 is released.
|
||||
if AssistantSettings::get_global(cx).using_outdated_settings_version {
|
||||
update_settings_file::<AssistantSettings>(fs.clone(), cx, {
|
||||
let fs = fs.clone();
|
||||
|content, cx| {
|
||||
content.update_file(fs, cx);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
cx.spawn(|mut cx| {
|
||||
let client = client.clone();
|
||||
async move {
|
||||
@@ -338,7 +192,7 @@ pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) {
|
||||
|
||||
context_store::init(&client);
|
||||
prompt_library::init(cx);
|
||||
completion_provider::init(client.clone(), cx);
|
||||
init_completion_provider(cx);
|
||||
assistant_slash_command::init(cx);
|
||||
register_slash_commands(cx);
|
||||
assistant_panel::init(cx);
|
||||
@@ -363,6 +217,38 @@ pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) {
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn init_completion_provider(cx: &mut AppContext) {
|
||||
completion::init(cx);
|
||||
update_active_language_model_from_settings(cx);
|
||||
|
||||
cx.observe_global::<SettingsStore>(update_active_language_model_from_settings)
|
||||
.detach();
|
||||
cx.observe(&LanguageModelRegistry::global(cx), |_, cx| {
|
||||
update_active_language_model_from_settings(cx)
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn update_active_language_model_from_settings(cx: &mut AppContext) {
|
||||
let settings = AssistantSettings::get_global(cx);
|
||||
let provider_name = LanguageModelProviderId::from(settings.default_model.provider.clone());
|
||||
let model_id = LanguageModelId::from(settings.default_model.model.clone());
|
||||
|
||||
let Some(provider) = LanguageModelRegistry::global(cx)
|
||||
.read(cx)
|
||||
.provider(&provider_name)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let models = provider.provided_models(cx);
|
||||
if let Some(model) = models.iter().find(|model| model.id() == model_id).cloned() {
|
||||
LanguageModelCompletionProvider::global(cx).update(cx, |completion_provider, cx| {
|
||||
completion_provider.set_active_model(model, cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn register_slash_commands(cx: &mut AppContext) {
|
||||
let slash_command_registry = SlashCommandRegistry::global(cx);
|
||||
slash_command_registry.register_command(file_command::FileSlashCommand, true);
|
||||
|
||||
@@ -1,162 +1,14 @@
|
||||
use std::fmt;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::{preprocess_anthropic_request, LanguageModel, LanguageModelRequest};
|
||||
pub use anthropic::Model as AnthropicModel;
|
||||
use gpui::Pixels;
|
||||
pub use ollama::Model as OllamaModel;
|
||||
pub use open_ai::Model as OpenAiModel;
|
||||
use schemars::{
|
||||
schema::{InstanceType, Metadata, Schema, SchemaObject},
|
||||
JsonSchema,
|
||||
};
|
||||
use serde::{
|
||||
de::{self, Visitor},
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
};
|
||||
use settings::{Settings, SettingsSources};
|
||||
use strum::{EnumIter, IntoEnumIterator};
|
||||
|
||||
#[derive(Clone, Debug, Default, PartialEq, EnumIter)]
|
||||
pub enum CloudModel {
|
||||
Gpt3Point5Turbo,
|
||||
Gpt4,
|
||||
Gpt4Turbo,
|
||||
#[default]
|
||||
Gpt4Omni,
|
||||
Claude3_5Sonnet,
|
||||
Claude3Opus,
|
||||
Claude3Sonnet,
|
||||
Claude3Haiku,
|
||||
Gemini15Pro,
|
||||
Gemini15Flash,
|
||||
Custom(String),
|
||||
}
|
||||
|
||||
impl Serialize for CloudModel {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_str(self.id())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for CloudModel {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
struct ZedDotDevModelVisitor;
|
||||
|
||||
impl<'de> Visitor<'de> for ZedDotDevModelVisitor {
|
||||
type Value = CloudModel;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str("a string for a ZedDotDevModel variant or a custom model")
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: de::Error,
|
||||
{
|
||||
let model = CloudModel::iter()
|
||||
.find(|model| model.id() == value)
|
||||
.unwrap_or_else(|| CloudModel::Custom(value.to_string()));
|
||||
Ok(model)
|
||||
}
|
||||
}
|
||||
|
||||
deserializer.deserialize_str(ZedDotDevModelVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
impl JsonSchema for CloudModel {
|
||||
fn schema_name() -> String {
|
||||
"ZedDotDevModel".to_owned()
|
||||
}
|
||||
|
||||
fn json_schema(_generator: &mut schemars::gen::SchemaGenerator) -> Schema {
|
||||
let variants = CloudModel::iter()
|
||||
.filter_map(|model| {
|
||||
let id = model.id();
|
||||
if id.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(id.to_string())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
Schema::Object(SchemaObject {
|
||||
instance_type: Some(InstanceType::String.into()),
|
||||
enum_values: Some(variants.iter().map(|s| s.clone().into()).collect()),
|
||||
metadata: Some(Box::new(Metadata {
|
||||
title: Some("ZedDotDevModel".to_owned()),
|
||||
default: Some(CloudModel::default().id().into()),
|
||||
examples: variants.into_iter().map(Into::into).collect(),
|
||||
..Default::default()
|
||||
})),
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl CloudModel {
|
||||
pub fn id(&self) -> &str {
|
||||
match self {
|
||||
Self::Gpt3Point5Turbo => "gpt-3.5-turbo",
|
||||
Self::Gpt4 => "gpt-4",
|
||||
Self::Gpt4Turbo => "gpt-4-turbo-preview",
|
||||
Self::Gpt4Omni => "gpt-4o",
|
||||
Self::Claude3_5Sonnet => "claude-3-5-sonnet",
|
||||
Self::Claude3Opus => "claude-3-opus",
|
||||
Self::Claude3Sonnet => "claude-3-sonnet",
|
||||
Self::Claude3Haiku => "claude-3-haiku",
|
||||
Self::Gemini15Pro => "gemini-1.5-pro",
|
||||
Self::Gemini15Flash => "gemini-1.5-flash",
|
||||
Self::Custom(id) => id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn display_name(&self) -> &str {
|
||||
match self {
|
||||
Self::Gpt3Point5Turbo => "GPT 3.5 Turbo",
|
||||
Self::Gpt4 => "GPT 4",
|
||||
Self::Gpt4Turbo => "GPT 4 Turbo",
|
||||
Self::Gpt4Omni => "GPT 4 Omni",
|
||||
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
|
||||
Self::Claude3Opus => "Claude 3 Opus",
|
||||
Self::Claude3Sonnet => "Claude 3 Sonnet",
|
||||
Self::Claude3Haiku => "Claude 3 Haiku",
|
||||
Self::Gemini15Pro => "Gemini 1.5 Pro",
|
||||
Self::Gemini15Flash => "Gemini 1.5 Flash",
|
||||
Self::Custom(id) => id.as_str(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn max_token_count(&self) -> usize {
|
||||
match self {
|
||||
Self::Gpt3Point5Turbo => 2048,
|
||||
Self::Gpt4 => 4096,
|
||||
Self::Gpt4Turbo | Self::Gpt4Omni => 128000,
|
||||
Self::Claude3_5Sonnet
|
||||
| Self::Claude3Opus
|
||||
| Self::Claude3Sonnet
|
||||
| Self::Claude3Haiku => 200000,
|
||||
Self::Gemini15Pro => 128000,
|
||||
Self::Gemini15Flash => 32000,
|
||||
Self::Custom(_) => 4096, // TODO: Make this configurable
|
||||
}
|
||||
}
|
||||
|
||||
pub fn preprocess_request(&self, request: &mut LanguageModelRequest) {
|
||||
match self {
|
||||
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => {
|
||||
preprocess_anthropic_request(request)
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
use anthropic::Model as AnthropicModel;
|
||||
use fs::Fs;
|
||||
use gpui::{AppContext, Pixels};
|
||||
use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
|
||||
use ollama::Model as OllamaModel;
|
||||
use open_ai::Model as OpenAiModel;
|
||||
use schemars::{schema::Schema, JsonSchema};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{update_settings_file, Settings, SettingsSources};
|
||||
|
||||
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
@@ -167,43 +19,9 @@ pub enum AssistantDockPosition {
|
||||
Bottom,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum AssistantProvider {
|
||||
ZedDotDev {
|
||||
model: CloudModel,
|
||||
},
|
||||
OpenAi {
|
||||
model: OpenAiModel,
|
||||
api_url: String,
|
||||
low_speed_timeout_in_seconds: Option<u64>,
|
||||
available_models: Vec<OpenAiModel>,
|
||||
},
|
||||
Anthropic {
|
||||
model: AnthropicModel,
|
||||
api_url: String,
|
||||
low_speed_timeout_in_seconds: Option<u64>,
|
||||
},
|
||||
Ollama {
|
||||
model: OllamaModel,
|
||||
api_url: String,
|
||||
low_speed_timeout_in_seconds: Option<u64>,
|
||||
},
|
||||
}
|
||||
|
||||
impl Default for AssistantProvider {
|
||||
fn default() -> Self {
|
||||
Self::OpenAi {
|
||||
model: OpenAiModel::default(),
|
||||
api_url: open_ai::OPEN_AI_API_URL.into(),
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
|
||||
#[serde(tag = "name", rename_all = "snake_case")]
|
||||
pub enum AssistantProviderContent {
|
||||
pub enum AssistantProviderContentV1 {
|
||||
#[serde(rename = "zed.dev")]
|
||||
ZedDotDev { default_model: Option<CloudModel> },
|
||||
#[serde(rename = "openai")]
|
||||
@@ -234,7 +52,8 @@ pub struct AssistantSettings {
|
||||
pub dock: AssistantDockPosition,
|
||||
pub default_width: Pixels,
|
||||
pub default_height: Pixels,
|
||||
pub provider: AssistantProvider,
|
||||
pub default_model: AssistantDefaultModel,
|
||||
pub using_outdated_settings_version: bool,
|
||||
}
|
||||
|
||||
/// Assistant panel settings
|
||||
@@ -266,34 +85,142 @@ impl Default for AssistantSettingsContent {
|
||||
}
|
||||
|
||||
impl AssistantSettingsContent {
|
||||
fn upgrade(&self) -> AssistantSettingsContentV1 {
|
||||
pub fn is_version_outdated(&self) -> bool {
|
||||
match self {
|
||||
AssistantSettingsContent::Versioned(settings) => match settings {
|
||||
VersionedAssistantSettingsContent::V1(settings) => settings.clone(),
|
||||
VersionedAssistantSettingsContent::V1(_) => true,
|
||||
VersionedAssistantSettingsContent::V2(_) => false,
|
||||
},
|
||||
AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV1 {
|
||||
AssistantSettingsContent::Legacy(_) => true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_file(&mut self, fs: Arc<dyn Fs>, cx: &AppContext) {
|
||||
if let AssistantSettingsContent::Versioned(settings) = self {
|
||||
if let VersionedAssistantSettingsContent::V1(settings) = settings {
|
||||
if let Some(provider) = settings.provider.clone() {
|
||||
match provider {
|
||||
AssistantProviderContentV1::Anthropic {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
..
|
||||
} => update_settings_file::<AllLanguageModelSettings>(
|
||||
fs,
|
||||
cx,
|
||||
move |content, _| {
|
||||
if content.anthropic.is_none() {
|
||||
content.anthropic =
|
||||
Some(language_model::settings::AnthropicSettingsContent {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
},
|
||||
),
|
||||
AssistantProviderContentV1::Ollama {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
..
|
||||
} => update_settings_file::<AllLanguageModelSettings>(
|
||||
fs,
|
||||
cx,
|
||||
move |content, _| {
|
||||
if content.ollama.is_none() {
|
||||
content.ollama =
|
||||
Some(language_model::settings::OllamaSettingsContent {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
});
|
||||
}
|
||||
},
|
||||
),
|
||||
AssistantProviderContentV1::OpenAi {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
..
|
||||
} => update_settings_file::<AllLanguageModelSettings>(
|
||||
fs,
|
||||
cx,
|
||||
move |content, _| {
|
||||
if content.openai.is_none() {
|
||||
content.openai =
|
||||
Some(language_model::settings::OpenAiSettingsContent {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
});
|
||||
}
|
||||
},
|
||||
),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
*self = AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
|
||||
self.upgrade(),
|
||||
));
|
||||
}
|
||||
|
||||
fn upgrade(&self) -> AssistantSettingsContentV2 {
|
||||
match self {
|
||||
AssistantSettingsContent::Versioned(settings) => match settings {
|
||||
VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
|
||||
enabled: settings.enabled,
|
||||
button: settings.button,
|
||||
dock: settings.dock,
|
||||
default_width: settings.default_width,
|
||||
default_height: settings.default_width,
|
||||
default_model: settings
|
||||
.provider
|
||||
.clone()
|
||||
.and_then(|provider| match provider {
|
||||
AssistantProviderContentV1::ZedDotDev { default_model } => {
|
||||
default_model.map(|model| AssistantDefaultModel {
|
||||
provider: "zed.dev".to_string(),
|
||||
model: model.id().to_string(),
|
||||
})
|
||||
}
|
||||
AssistantProviderContentV1::OpenAi { default_model, .. } => {
|
||||
default_model.map(|model| AssistantDefaultModel {
|
||||
provider: "openai".to_string(),
|
||||
model: model.id().to_string(),
|
||||
})
|
||||
}
|
||||
AssistantProviderContentV1::Anthropic { default_model, .. } => {
|
||||
default_model.map(|model| AssistantDefaultModel {
|
||||
provider: "anthropic".to_string(),
|
||||
model: model.id().to_string(),
|
||||
})
|
||||
}
|
||||
AssistantProviderContentV1::Ollama { default_model, .. } => {
|
||||
default_model.map(|model| AssistantDefaultModel {
|
||||
provider: "ollama".to_string(),
|
||||
model: model.id().to_string(),
|
||||
})
|
||||
}
|
||||
}),
|
||||
},
|
||||
VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
|
||||
},
|
||||
AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
|
||||
enabled: None,
|
||||
button: settings.button,
|
||||
dock: settings.dock,
|
||||
default_width: settings.default_width,
|
||||
default_height: settings.default_height,
|
||||
provider: if let Some(open_ai_api_url) = settings.openai_api_url.as_ref() {
|
||||
Some(AssistantProviderContent::OpenAi {
|
||||
default_model: settings.default_open_ai_model.clone(),
|
||||
api_url: Some(open_ai_api_url.clone()),
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Some(Default::default()),
|
||||
})
|
||||
} else {
|
||||
settings.default_open_ai_model.clone().map(|open_ai_model| {
|
||||
AssistantProviderContent::OpenAi {
|
||||
default_model: Some(open_ai_model),
|
||||
api_url: None,
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Some(Default::default()),
|
||||
}
|
||||
})
|
||||
},
|
||||
default_model: Some(AssistantDefaultModel {
|
||||
provider: "openai".to_string(),
|
||||
model: settings
|
||||
.default_open_ai_model
|
||||
.clone()
|
||||
.unwrap_or_default()
|
||||
.id()
|
||||
.to_string(),
|
||||
}),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -304,6 +231,9 @@ impl AssistantSettingsContent {
|
||||
VersionedAssistantSettingsContent::V1(settings) => {
|
||||
settings.dock = Some(dock);
|
||||
}
|
||||
VersionedAssistantSettingsContent::V2(settings) => {
|
||||
settings.dock = Some(dock);
|
||||
}
|
||||
},
|
||||
AssistantSettingsContent::Legacy(settings) => {
|
||||
settings.dock = Some(dock);
|
||||
@@ -311,74 +241,78 @@ impl AssistantSettingsContent {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_model(&mut self, new_model: LanguageModel) {
|
||||
pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
|
||||
let model = language_model.id().0.to_string();
|
||||
let provider = language_model.provider_id().0.to_string();
|
||||
|
||||
match self {
|
||||
AssistantSettingsContent::Versioned(settings) => match settings {
|
||||
VersionedAssistantSettingsContent::V1(settings) => match &mut settings.provider {
|
||||
Some(AssistantProviderContent::ZedDotDev {
|
||||
default_model: model,
|
||||
}) => {
|
||||
if let LanguageModel::Cloud(new_model) = new_model {
|
||||
*model = Some(new_model);
|
||||
}
|
||||
VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
|
||||
"zed.dev" => {
|
||||
settings.provider = Some(AssistantProviderContentV1::ZedDotDev {
|
||||
default_model: CloudModel::from_id(&model).ok(),
|
||||
});
|
||||
}
|
||||
Some(AssistantProviderContent::OpenAi {
|
||||
default_model: model,
|
||||
..
|
||||
}) => {
|
||||
if let LanguageModel::OpenAi(new_model) = new_model {
|
||||
*model = Some(new_model);
|
||||
}
|
||||
"anthropic" => {
|
||||
let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
|
||||
Some(AssistantProviderContentV1::Anthropic {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
..
|
||||
}) => (api_url.clone(), *low_speed_timeout_in_seconds),
|
||||
_ => (None, None),
|
||||
};
|
||||
settings.provider = Some(AssistantProviderContentV1::Anthropic {
|
||||
default_model: AnthropicModel::from_id(&model).ok(),
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
});
|
||||
}
|
||||
Some(AssistantProviderContent::Anthropic {
|
||||
default_model: model,
|
||||
..
|
||||
}) => {
|
||||
if let LanguageModel::Anthropic(new_model) = new_model {
|
||||
*model = Some(new_model);
|
||||
}
|
||||
"ollama" => {
|
||||
let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
|
||||
Some(AssistantProviderContentV1::Ollama {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
..
|
||||
}) => (api_url.clone(), *low_speed_timeout_in_seconds),
|
||||
_ => (None, None),
|
||||
};
|
||||
settings.provider = Some(AssistantProviderContentV1::Ollama {
|
||||
default_model: Some(ollama::Model::new(&model)),
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
});
|
||||
}
|
||||
Some(AssistantProviderContent::Ollama {
|
||||
default_model: model,
|
||||
..
|
||||
}) => {
|
||||
if let LanguageModel::Ollama(new_model) = new_model {
|
||||
*model = Some(new_model);
|
||||
}
|
||||
"openai" => {
|
||||
let (api_url, low_speed_timeout_in_seconds, available_models) =
|
||||
match &settings.provider {
|
||||
Some(AssistantProviderContentV1::OpenAi {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
..
|
||||
}) => (
|
||||
api_url.clone(),
|
||||
*low_speed_timeout_in_seconds,
|
||||
available_models.clone(),
|
||||
),
|
||||
_ => (None, None, None),
|
||||
};
|
||||
settings.provider = Some(AssistantProviderContentV1::OpenAi {
|
||||
default_model: open_ai::Model::from_id(&model).ok(),
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
});
|
||||
}
|
||||
provider => match new_model {
|
||||
LanguageModel::Cloud(model) => {
|
||||
*provider = Some(AssistantProviderContent::ZedDotDev {
|
||||
default_model: Some(model),
|
||||
})
|
||||
}
|
||||
LanguageModel::OpenAi(model) => {
|
||||
*provider = Some(AssistantProviderContent::OpenAi {
|
||||
default_model: Some(model),
|
||||
api_url: None,
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Some(Default::default()),
|
||||
})
|
||||
}
|
||||
LanguageModel::Anthropic(model) => {
|
||||
*provider = Some(AssistantProviderContent::Anthropic {
|
||||
default_model: Some(model),
|
||||
api_url: None,
|
||||
low_speed_timeout_in_seconds: None,
|
||||
})
|
||||
}
|
||||
LanguageModel::Ollama(model) => {
|
||||
*provider = Some(AssistantProviderContent::Ollama {
|
||||
default_model: Some(model),
|
||||
api_url: None,
|
||||
low_speed_timeout_in_seconds: None,
|
||||
})
|
||||
}
|
||||
},
|
||||
_ => {}
|
||||
},
|
||||
VersionedAssistantSettingsContent::V2(settings) => {
|
||||
settings.default_model = Some(AssistantDefaultModel { provider, model });
|
||||
}
|
||||
},
|
||||
AssistantSettingsContent::Legacy(settings) => {
|
||||
if let LanguageModel::OpenAi(model) = new_model {
|
||||
if let Ok(model) = open_ai::Model::from_id(&language_model.id().0) {
|
||||
settings.default_open_ai_model = Some(model);
|
||||
}
|
||||
}
|
||||
@@ -391,21 +325,78 @@ impl AssistantSettingsContent {
|
||||
pub enum VersionedAssistantSettingsContent {
|
||||
#[serde(rename = "1")]
|
||||
V1(AssistantSettingsContentV1),
|
||||
#[serde(rename = "2")]
|
||||
V2(AssistantSettingsContentV2),
|
||||
}
|
||||
|
||||
impl Default for VersionedAssistantSettingsContent {
|
||||
fn default() -> Self {
|
||||
Self::V1(AssistantSettingsContentV1 {
|
||||
Self::V2(AssistantSettingsContentV2 {
|
||||
enabled: None,
|
||||
button: None,
|
||||
dock: None,
|
||||
default_width: None,
|
||||
default_height: None,
|
||||
provider: None,
|
||||
default_model: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
pub struct AssistantSettingsContentV2 {
|
||||
/// Whether the Assistant is enabled.
|
||||
///
|
||||
/// Default: true
|
||||
enabled: Option<bool>,
|
||||
/// Whether to show the assistant panel button in the status bar.
|
||||
///
|
||||
/// Default: true
|
||||
button: Option<bool>,
|
||||
/// Where to dock the assistant.
|
||||
///
|
||||
/// Default: right
|
||||
dock: Option<AssistantDockPosition>,
|
||||
/// Default width in pixels when the assistant is docked to the left or right.
|
||||
///
|
||||
/// Default: 640
|
||||
default_width: Option<f32>,
|
||||
/// Default height in pixels when the assistant is docked to the bottom.
|
||||
///
|
||||
/// Default: 320
|
||||
default_height: Option<f32>,
|
||||
/// The default model to use when creating new contexts.
|
||||
default_model: Option<AssistantDefaultModel>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
|
||||
pub struct AssistantDefaultModel {
|
||||
#[schemars(schema_with = "providers_schema")]
|
||||
pub provider: String,
|
||||
pub model: String,
|
||||
}
|
||||
|
||||
fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
|
||||
schemars::schema::SchemaObject {
|
||||
enum_values: Some(vec![
|
||||
"anthropic".into(),
|
||||
"ollama".into(),
|
||||
"openai".into(),
|
||||
"zed.dev".into(),
|
||||
]),
|
||||
..Default::default()
|
||||
}
|
||||
.into()
|
||||
}
|
||||
|
||||
impl Default for AssistantDefaultModel {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
provider: "openai".to_string(),
|
||||
model: "gpt-4".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
pub struct AssistantSettingsContentV1 {
|
||||
/// Whether the Assistant is enabled.
|
||||
@@ -432,7 +423,7 @@ pub struct AssistantSettingsContentV1 {
|
||||
///
|
||||
/// This can either be the internal `zed.dev` service or an external `openai` service,
|
||||
/// each with their respective default models and configurations.
|
||||
provider: Option<AssistantProviderContent>,
|
||||
provider: Option<AssistantProviderContentV1>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
@@ -475,6 +466,10 @@ impl Settings for AssistantSettings {
|
||||
let mut settings = AssistantSettings::default();
|
||||
|
||||
for value in sources.defaults_and_customizations() {
|
||||
if value.is_version_outdated() {
|
||||
settings.using_outdated_settings_version = true;
|
||||
}
|
||||
|
||||
let value = value.upgrade();
|
||||
merge(&mut settings.enabled, value.enabled);
|
||||
merge(&mut settings.button, value.button);
|
||||
@@ -487,123 +482,10 @@ impl Settings for AssistantSettings {
|
||||
&mut settings.default_height,
|
||||
value.default_height.map(Into::into),
|
||||
);
|
||||
if let Some(provider) = value.provider.clone() {
|
||||
match (&mut settings.provider, provider) {
|
||||
(
|
||||
AssistantProvider::ZedDotDev { model },
|
||||
AssistantProviderContent::ZedDotDev {
|
||||
default_model: model_override,
|
||||
},
|
||||
) => {
|
||||
merge(model, model_override);
|
||||
}
|
||||
(
|
||||
AssistantProvider::OpenAi {
|
||||
model,
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
},
|
||||
AssistantProviderContent::OpenAi {
|
||||
default_model: model_override,
|
||||
api_url: api_url_override,
|
||||
low_speed_timeout_in_seconds: low_speed_timeout_in_seconds_override,
|
||||
available_models: available_models_override,
|
||||
},
|
||||
) => {
|
||||
merge(model, model_override);
|
||||
merge(api_url, api_url_override);
|
||||
merge(available_models, available_models_override);
|
||||
if let Some(low_speed_timeout_in_seconds_override) =
|
||||
low_speed_timeout_in_seconds_override
|
||||
{
|
||||
*low_speed_timeout_in_seconds =
|
||||
Some(low_speed_timeout_in_seconds_override);
|
||||
}
|
||||
}
|
||||
(
|
||||
AssistantProvider::Ollama {
|
||||
model,
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
},
|
||||
AssistantProviderContent::Ollama {
|
||||
default_model: model_override,
|
||||
api_url: api_url_override,
|
||||
low_speed_timeout_in_seconds: low_speed_timeout_in_seconds_override,
|
||||
},
|
||||
) => {
|
||||
merge(model, model_override);
|
||||
merge(api_url, api_url_override);
|
||||
if let Some(low_speed_timeout_in_seconds_override) =
|
||||
low_speed_timeout_in_seconds_override
|
||||
{
|
||||
*low_speed_timeout_in_seconds =
|
||||
Some(low_speed_timeout_in_seconds_override);
|
||||
}
|
||||
}
|
||||
(
|
||||
AssistantProvider::Anthropic {
|
||||
model,
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
},
|
||||
AssistantProviderContent::Anthropic {
|
||||
default_model: model_override,
|
||||
api_url: api_url_override,
|
||||
low_speed_timeout_in_seconds: low_speed_timeout_in_seconds_override,
|
||||
},
|
||||
) => {
|
||||
merge(model, model_override);
|
||||
merge(api_url, api_url_override);
|
||||
if let Some(low_speed_timeout_in_seconds_override) =
|
||||
low_speed_timeout_in_seconds_override
|
||||
{
|
||||
*low_speed_timeout_in_seconds =
|
||||
Some(low_speed_timeout_in_seconds_override);
|
||||
}
|
||||
}
|
||||
(provider, provider_override) => {
|
||||
*provider = match provider_override {
|
||||
AssistantProviderContent::ZedDotDev {
|
||||
default_model: model,
|
||||
} => AssistantProvider::ZedDotDev {
|
||||
model: model.unwrap_or_default(),
|
||||
},
|
||||
AssistantProviderContent::OpenAi {
|
||||
default_model: model,
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
} => AssistantProvider::OpenAi {
|
||||
model: model.unwrap_or_default(),
|
||||
api_url: api_url.unwrap_or_else(|| open_ai::OPEN_AI_API_URL.into()),
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models: available_models.unwrap_or_default(),
|
||||
},
|
||||
AssistantProviderContent::Anthropic {
|
||||
default_model: model,
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
} => AssistantProvider::Anthropic {
|
||||
model: model.unwrap_or_default(),
|
||||
api_url: api_url
|
||||
.unwrap_or_else(|| anthropic::ANTHROPIC_API_URL.into()),
|
||||
low_speed_timeout_in_seconds,
|
||||
},
|
||||
AssistantProviderContent::Ollama {
|
||||
default_model: model,
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
} => AssistantProvider::Ollama {
|
||||
model: model.unwrap_or_default(),
|
||||
api_url: api_url.unwrap_or_else(|| ollama::OLLAMA_API_URL.into()),
|
||||
low_speed_timeout_in_seconds,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
merge(
|
||||
&mut settings.default_model,
|
||||
value.default_model.map(Into::into),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(settings)
|
||||
@@ -616,96 +498,103 @@ fn merge<T>(target: &mut T, value: Option<T>) {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use gpui::{AppContext, UpdateGlobal};
|
||||
use settings::SettingsStore;
|
||||
// #[cfg(test)]
|
||||
// mod tests {
|
||||
// use gpui::{AppContext, UpdateGlobal};
|
||||
// use settings::SettingsStore;
|
||||
|
||||
use super::*;
|
||||
// use super::*;
|
||||
|
||||
#[gpui::test]
|
||||
fn test_deserialize_assistant_settings(cx: &mut AppContext) {
|
||||
let store = settings::SettingsStore::test(cx);
|
||||
cx.set_global(store);
|
||||
// #[gpui::test]
|
||||
// fn test_deserialize_assistant_settings(cx: &mut AppContext) {
|
||||
// let store = settings::SettingsStore::test(cx);
|
||||
// cx.set_global(store);
|
||||
|
||||
// Settings default to gpt-4-turbo.
|
||||
AssistantSettings::register(cx);
|
||||
assert_eq!(
|
||||
AssistantSettings::get_global(cx).provider,
|
||||
AssistantProvider::OpenAi {
|
||||
model: OpenAiModel::FourOmni,
|
||||
api_url: open_ai::OPEN_AI_API_URL.into(),
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Default::default(),
|
||||
}
|
||||
);
|
||||
// // Settings default to gpt-4-turbo.
|
||||
// AssistantSettings::register(cx);
|
||||
// assert_eq!(
|
||||
// AssistantSettings::get_global(cx).provider,
|
||||
// AssistantProvider::OpenAi {
|
||||
// model: OpenAiModel::FourOmni,
|
||||
// api_url: open_ai::OPEN_AI_API_URL.into(),
|
||||
// low_speed_timeout_in_seconds: None,
|
||||
// available_models: Default::default(),
|
||||
// }
|
||||
// );
|
||||
|
||||
// Ensure backward-compatibility.
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store
|
||||
.set_user_settings(
|
||||
r#"{
|
||||
"assistant": {
|
||||
"openai_api_url": "test-url",
|
||||
}
|
||||
}"#,
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
});
|
||||
assert_eq!(
|
||||
AssistantSettings::get_global(cx).provider,
|
||||
AssistantProvider::OpenAi {
|
||||
model: OpenAiModel::FourOmni,
|
||||
api_url: "test-url".into(),
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Default::default(),
|
||||
}
|
||||
);
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store
|
||||
.set_user_settings(
|
||||
r#"{
|
||||
"assistant": {
|
||||
"default_open_ai_model": "gpt-4-0613"
|
||||
}
|
||||
}"#,
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
});
|
||||
assert_eq!(
|
||||
AssistantSettings::get_global(cx).provider,
|
||||
AssistantProvider::OpenAi {
|
||||
model: OpenAiModel::Four,
|
||||
api_url: open_ai::OPEN_AI_API_URL.into(),
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Default::default(),
|
||||
}
|
||||
);
|
||||
// // Ensure backward-compatibility.
|
||||
// SettingsStore::update_global(cx, |store, cx| {
|
||||
// store
|
||||
// .set_user_settings(
|
||||
// r#"{
|
||||
// "assistant": {
|
||||
// "openai_api_url": "test-url",
|
||||
// }
|
||||
// }"#,
|
||||
// cx,
|
||||
// )
|
||||
// .unwrap();
|
||||
// });
|
||||
// assert_eq!(
|
||||
// AssistantSettings::get_global(cx).provider,
|
||||
// AssistantProvider::OpenAi {
|
||||
// model: OpenAiModel::FourOmni,
|
||||
// api_url: "test-url".into(),
|
||||
// low_speed_timeout_in_seconds: None,
|
||||
// available_models: Default::default(),
|
||||
// }
|
||||
// );
|
||||
// SettingsStore::update_global(cx, |store, cx| {
|
||||
// store
|
||||
// .set_user_settings(
|
||||
// r#"{
|
||||
// "assistant": {
|
||||
// "default_open_ai_model": "gpt-4-0613"
|
||||
// }
|
||||
// }"#,
|
||||
// cx,
|
||||
// )
|
||||
// .unwrap();
|
||||
// });
|
||||
// assert_eq!(
|
||||
// AssistantSettings::get_global(cx).provider,
|
||||
// AssistantProvider::OpenAi {
|
||||
// model: OpenAiModel::Four,
|
||||
// api_url: open_ai::OPEN_AI_API_URL.into(),
|
||||
// low_speed_timeout_in_seconds: None,
|
||||
// available_models: Default::default(),
|
||||
// }
|
||||
// );
|
||||
|
||||
    // The new version supports setting a custom model when using zed.dev.
    SettingsStore::update_global(cx, |store, cx| {
        store
            .set_user_settings(
                r#"{
                    "assistant": {
                        "version": "1",
                        "provider": {
                            "name": "zed.dev",
                            "default_model": "custom"
                        }
                    }
                }"#,
                cx,
            )
            .unwrap();
    });
    assert_eq!(
        AssistantSettings::get_global(cx).provider,
        AssistantProvider::ZedDotDev {
            model: CloudModel::Custom("custom".into())
        }
    );
}
}
    // // The new version supports setting a custom model when using zed.dev.
    // SettingsStore::update_global(cx, |store, cx| {
    //     store
    //         .set_user_settings(
    //             r#"{
    //                 "assistant": {
    //                     "version": "1",
    //                     "provider": {
    //                         "name": "zed.dev",
    //                         "default_model": {
    //                             "custom": {
    //                                 "name": "custom-provider"
    //                             }
    //                         }
    //                     }
    //                 }
    //             }"#,
    //             cx,
    //         )
    //         .unwrap();
    // });
    // assert_eq!(
    //     AssistantSettings::get_global(cx).provider,
    //     AssistantProvider::ZedDotDev {
    //         model: CloudModel::Custom {
    //             name: "custom-provider".into(),
    //             max_tokens: None
    //         }
    //     }
    // );
// }
// }

@@ -1,373 +0,0 @@
mod anthropic;
mod cloud;
#[cfg(any(test, feature = "test-support"))]
mod fake;
mod ollama;
mod open_ai;

pub use anthropic::*;
pub use cloud::*;
#[cfg(any(test, feature = "test-support"))]
pub use fake::*;
pub use ollama::*;
pub use open_ai::*;
use parking_lot::RwLock;
use smol::lock::{Semaphore, SemaphoreGuardArc};

use crate::{
    assistant_settings::{AssistantProvider, AssistantSettings},
    LanguageModel, LanguageModelRequest,
};
use anyhow::Result;
use client::Client;
use futures::{future::BoxFuture, stream::BoxStream};
use gpui::{AnyView, AppContext, BorrowAppContext, Task, WindowContext};
use settings::{Settings, SettingsStore};
use std::time::Duration;
use std::{any::Any, sync::Arc};

/// Choose which model to use for the OpenAI provider.
/// If the requested model is not available, fall back to the first available model,
/// or keep the requested model when no models are available at all.
fn choose_openai_model(
    model: &::open_ai::Model,
    available_models: &[::open_ai::Model],
) -> ::open_ai::Model {
    available_models
        .iter()
        .find(|&m| m == model)
        .or_else(|| available_models.first())
        .unwrap_or_else(|| model)
        .clone()
}
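// Editor's sketch (not part of the original file): the fallback behaviour of
// `choose_openai_model`, using only the `open_ai::Model` variants that appear in
// the settings test above. `PartialEq`/`Clone` on the model type are implied by
// the `m == model` comparison and `.clone()` call in the function body.
#[cfg(test)]
mod choose_openai_model_sketch {
    use super::choose_openai_model;

    #[test]
    fn prefers_requested_then_first_available_then_original() {
        let requested = ::open_ai::Model::Four;
        // Requested model is available: it is kept.
        assert!(choose_openai_model(&requested, &[::open_ai::Model::Four]) == ::open_ai::Model::Four);
        // Requested model is missing: fall back to the first available model.
        assert!(
            choose_openai_model(&requested, &[::open_ai::Model::FourOmni])
                == ::open_ai::Model::FourOmni
        );
        // No models are available at all: keep the original request.
        assert!(choose_openai_model(&requested, &[]) == ::open_ai::Model::Four);
    }
}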
pub fn init(client: Arc<Client>, cx: &mut AppContext) {
    let provider = create_provider_from_settings(client.clone(), 0, cx);
    cx.set_global(CompletionProvider::new(provider, Some(client)));

    let mut settings_version = 0;
    cx.observe_global::<SettingsStore>(move |cx| {
        settings_version += 1;
        cx.update_global::<CompletionProvider, _>(|provider, cx| {
            provider.update_settings(settings_version, cx);
        })
    })
    .detach();
}

pub struct CompletionResponse {
    pub inner: BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>,
    _lock: SemaphoreGuardArc,
}

pub trait LanguageModelCompletionProvider: Send + Sync {
    fn available_models(&self, cx: &AppContext) -> Vec<LanguageModel>;
    fn settings_version(&self) -> usize;
    fn is_authenticated(&self) -> bool;
    fn authenticate(&self, cx: &AppContext) -> Task<Result<()>>;
    fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView;
    fn reset_credentials(&self, cx: &AppContext) -> Task<Result<()>>;
    fn model(&self) -> LanguageModel;
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>>;
    fn complete(
        &self,
        request: LanguageModelRequest,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>;

    fn as_any_mut(&mut self) -> &mut dyn Any;
}

const MAX_CONCURRENT_COMPLETION_REQUESTS: usize = 4;

pub struct CompletionProvider {
    provider: Arc<RwLock<dyn LanguageModelCompletionProvider>>,
    client: Option<Arc<Client>>,
    request_limiter: Arc<Semaphore>,
}

impl CompletionProvider {
    pub fn new(
        provider: Arc<RwLock<dyn LanguageModelCompletionProvider>>,
        client: Option<Arc<Client>>,
    ) -> Self {
        Self {
            provider,
            client,
            request_limiter: Arc::new(Semaphore::new(MAX_CONCURRENT_COMPLETION_REQUESTS)),
        }
    }

    pub fn available_models(&self, cx: &AppContext) -> Vec<LanguageModel> {
        self.provider.read().available_models(cx)
    }

    pub fn settings_version(&self) -> usize {
        self.provider.read().settings_version()
    }

    pub fn is_authenticated(&self) -> bool {
        self.provider.read().is_authenticated()
    }

    pub fn authenticate(&self, cx: &AppContext) -> Task<Result<()>> {
        self.provider.read().authenticate(cx)
    }

    pub fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView {
        self.provider.read().authentication_prompt(cx)
    }

    pub fn reset_credentials(&self, cx: &AppContext) -> Task<Result<()>> {
        self.provider.read().reset_credentials(cx)
    }

    pub fn model(&self) -> LanguageModel {
        self.provider.read().model()
    }

    pub fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        self.provider.read().count_tokens(request, cx)
    }

    pub fn complete(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> Task<CompletionResponse> {
        let rate_limiter = self.request_limiter.clone();
        let provider = self.provider.clone();
        cx.background_executor().spawn(async move {
            let lock = rate_limiter.acquire_arc().await;
            let response = provider.read().complete(request);
            CompletionResponse {
                inner: response,
                _lock: lock,
            }
        })
    }
}
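// Editor's note, a minimal sketch (not from the original source) of the
// rate-limiting pattern implemented by `complete` above: a permit is taken from
// the semaphore before the provider is asked for a completion, and the
// `SemaphoreGuardArc` travels inside `CompletionResponse`, so the permit is only
// released when the caller drops the response, i.e. when it is done streaming.
#[allow(dead_code)]
async fn rate_limit_sketch(limiter: Arc<Semaphore>) -> SemaphoreGuardArc {
    // At most MAX_CONCURRENT_COMPLETION_REQUESTS of these guards can be alive at once.
    limiter.acquire_arc().await
}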
impl gpui::Global for CompletionProvider {}

impl CompletionProvider {
    pub fn global(cx: &AppContext) -> &Self {
        cx.global::<Self>()
    }

    pub fn update_current_as<R, T: LanguageModelCompletionProvider + 'static>(
        &mut self,
        update: impl FnOnce(&mut T) -> R,
    ) -> Option<R> {
        let mut provider = self.provider.write();
        if let Some(provider) = provider.as_any_mut().downcast_mut::<T>() {
            Some(update(provider))
        } else {
            None
        }
    }

    pub fn update_settings(&mut self, version: usize, cx: &mut AppContext) {
        let updated = match &AssistantSettings::get_global(cx).provider {
            AssistantProvider::ZedDotDev { model } => self
                .update_current_as::<_, CloudCompletionProvider>(|provider| {
                    provider.update(model.clone(), version);
                }),
            AssistantProvider::OpenAi {
                model,
                api_url,
                low_speed_timeout_in_seconds,
                available_models,
            } => self.update_current_as::<_, OpenAiCompletionProvider>(|provider| {
                provider.update(
                    choose_openai_model(&model, &available_models),
                    api_url.clone(),
                    low_speed_timeout_in_seconds.map(Duration::from_secs),
                    version,
                );
            }),
            AssistantProvider::Anthropic {
                model,
                api_url,
                low_speed_timeout_in_seconds,
            } => self.update_current_as::<_, AnthropicCompletionProvider>(|provider| {
                provider.update(
                    model.clone(),
                    api_url.clone(),
                    low_speed_timeout_in_seconds.map(Duration::from_secs),
                    version,
                );
            }),
            AssistantProvider::Ollama {
                model,
                api_url,
                low_speed_timeout_in_seconds,
            } => self.update_current_as::<_, OllamaCompletionProvider>(|provider| {
                provider.update(
                    model.clone(),
                    api_url.clone(),
                    low_speed_timeout_in_seconds.map(Duration::from_secs),
                    version,
                    cx,
                );
            }),
        };

        // The previously configured provider kind was switched to a different one,
        // so a new provider has to be built from the current settings.
        if updated.is_none() {
            if let Some(client) = self.client.clone() {
                self.provider = create_provider_from_settings(client, version, cx);
            } else {
                log::warn!("completion provider cannot be created because client is not set");
            }
        }
    }
}

fn create_provider_from_settings(
    client: Arc<Client>,
    settings_version: usize,
    cx: &mut AppContext,
) -> Arc<RwLock<dyn LanguageModelCompletionProvider>> {
    match &AssistantSettings::get_global(cx).provider {
        AssistantProvider::ZedDotDev { model } => Arc::new(RwLock::new(
            CloudCompletionProvider::new(model.clone(), client.clone(), settings_version, cx),
        )),
        AssistantProvider::OpenAi {
            model,
            api_url,
            low_speed_timeout_in_seconds,
            available_models,
        } => Arc::new(RwLock::new(OpenAiCompletionProvider::new(
            choose_openai_model(&model, &available_models),
            api_url.clone(),
            client.http_client(),
            low_speed_timeout_in_seconds.map(Duration::from_secs),
            settings_version,
        ))),
        AssistantProvider::Anthropic {
            model,
            api_url,
            low_speed_timeout_in_seconds,
        } => Arc::new(RwLock::new(AnthropicCompletionProvider::new(
            model.clone(),
            api_url.clone(),
            client.http_client(),
            low_speed_timeout_in_seconds.map(Duration::from_secs),
            settings_version,
        ))),
        AssistantProvider::Ollama {
            model,
            api_url,
            low_speed_timeout_in_seconds,
        } => Arc::new(RwLock::new(OllamaCompletionProvider::new(
            model.clone(),
            api_url.clone(),
            client.http_client(),
            low_speed_timeout_in_seconds.map(Duration::from_secs),
            settings_version,
            cx,
        ))),
    }
}
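// Editor's sketch: the user-facing settings shape that drives the match above.
// The zed.dev form is taken verbatim from the settings test earlier in this diff;
// the claim that "openai", "anthropic", and "ollama" use the same `provider.name`
// key is an assumption, not something this diff confirms.
#[allow(dead_code)]
const EXAMPLE_ASSISTANT_SETTINGS: &str = r#"{
    "assistant": {
        "version": "1",
        "provider": {
            "name": "zed.dev",
            "default_model": "custom"
        }
    }
}"#;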
#[cfg(test)]
mod tests {
    use std::sync::Arc;

    use gpui::AppContext;
    use parking_lot::RwLock;
    use settings::SettingsStore;
    use smol::stream::StreamExt;

    use crate::{
        completion_provider::MAX_CONCURRENT_COMPLETION_REQUESTS, CompletionProvider,
        FakeCompletionProvider, LanguageModelRequest,
    };

    #[gpui::test]
    fn test_rate_limiting(cx: &mut AppContext) {
        SettingsStore::test(cx);
        let fake_provider = FakeCompletionProvider::setup_test(cx);

        let provider = CompletionProvider::new(Arc::new(RwLock::new(fake_provider.clone())), None);

        // Enqueue more requests than the limiter allows to run concurrently.
        for i in 0..MAX_CONCURRENT_COMPLETION_REQUESTS * 2 {
            let response = provider.complete(
                LanguageModelRequest {
                    temperature: i as f32 / 10.0,
                    ..Default::default()
                },
                cx,
            );
            cx.background_executor()
                .spawn(async move {
                    let response = response.await;
                    let mut stream = response.inner.await.unwrap();
                    while let Some(message) = stream.next().await {
                        message.unwrap();
                    }
                })
                .detach();
        }
        cx.background_executor().run_until_parked();

        assert_eq!(
            fake_provider.completion_count(),
            MAX_CONCURRENT_COMPLETION_REQUESTS
        );

        // Get the first completion request that is in flight and mark it as completed.
        let completion = fake_provider
            .running_completions()
            .into_iter()
            .next()
            .unwrap();
        fake_provider.finish_completion(&completion);

        // Ensure that the number of in-flight completion requests is reduced.
        assert_eq!(
            fake_provider.completion_count(),
            MAX_CONCURRENT_COMPLETION_REQUESTS - 1
        );

        cx.background_executor().run_until_parked();

        // Ensure that another completion request was allowed to acquire the lock.
        assert_eq!(
            fake_provider.completion_count(),
            MAX_CONCURRENT_COMPLETION_REQUESTS
        );

        // Mark all in-flight completion requests as finished.
        for request in fake_provider.running_completions() {
            fake_provider.finish_completion(&request);
        }

        assert_eq!(fake_provider.completion_count(), 0);

        // Wait until the background tasks acquire the lock again.
        cx.background_executor().run_until_parked();

        assert_eq!(
            fake_provider.completion_count(),
            MAX_CONCURRENT_COMPLETION_REQUESTS - 1
        );

        // Finish all remaining completion requests.
        for request in fake_provider.running_completions() {
            fake_provider.finish_completion(&request);
        }

        cx.background_executor().run_until_parked();

        assert_eq!(fake_provider.completion_count(), 0);
    }
}
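// Editor's sketch (not original code): the fake-provider pattern used by
// `test_rate_limiting` above, extended with `send_completion` / `finish_completion`
// from the `FakeCompletionProvider` shown later in this diff, to illustrate how a
// single request is driven end to end in a test.
#[cfg(test)]
mod fake_provider_usage_sketch {
    use crate::{CompletionProvider, FakeCompletionProvider, LanguageModelRequest};
    use gpui::AppContext;
    use parking_lot::RwLock;
    use settings::SettingsStore;
    use smol::stream::StreamExt;
    use std::sync::Arc;

    #[gpui::test]
    fn drives_one_request_through_the_fake(cx: &mut AppContext) {
        SettingsStore::test(cx);
        let fake = FakeCompletionProvider::setup_test(cx);
        let provider = CompletionProvider::new(Arc::new(RwLock::new(fake.clone())), None);

        // Issue a single request and consume its stream on the background executor.
        let response = provider.complete(LanguageModelRequest::default(), cx);
        cx.background_executor()
            .spawn(async move {
                let response = response.await;
                let mut stream = response.inner.await.unwrap();
                while let Some(chunk) = stream.next().await {
                    chunk.unwrap();
                }
            })
            .detach();
        cx.background_executor().run_until_parked();

        // Feed one chunk to the in-flight request, then mark it finished.
        let request = fake.running_completions().into_iter().next().unwrap();
        fake.send_completion(&request, "hello".into());
        fake.finish_completion(&request);
        assert_eq!(fake.completion_count(), 0);
    }
}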
@@ -1,208 +0,0 @@
|
||||
use crate::{
|
||||
assistant_settings::CloudModel, count_open_ai_tokens, CompletionProvider, LanguageModel,
|
||||
LanguageModelCompletionProvider, LanguageModelRequest,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use client::{proto, Client};
|
||||
use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt, TryFutureExt};
|
||||
use gpui::{AnyView, AppContext, Task};
|
||||
use std::{future, sync::Arc};
|
||||
use strum::IntoEnumIterator;
|
||||
use ui::prelude::*;
|
||||
|
||||
pub struct CloudCompletionProvider {
|
||||
client: Arc<Client>,
|
||||
model: CloudModel,
|
||||
settings_version: usize,
|
||||
status: client::Status,
|
||||
_maintain_client_status: Task<()>,
|
||||
}
|
||||
|
||||
impl CloudCompletionProvider {
|
||||
pub fn new(
|
||||
model: CloudModel,
|
||||
client: Arc<Client>,
|
||||
settings_version: usize,
|
||||
cx: &mut AppContext,
|
||||
) -> Self {
|
||||
let mut status_rx = client.status();
|
||||
let status = *status_rx.borrow();
|
||||
let maintain_client_status = cx.spawn(|mut cx| async move {
|
||||
while let Some(status) = status_rx.next().await {
|
||||
let _ = cx.update_global::<CompletionProvider, _>(|provider, _cx| {
|
||||
provider.update_current_as::<_, Self>(|provider| {
|
||||
provider.status = status;
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
Self {
|
||||
client,
|
||||
model,
|
||||
settings_version,
|
||||
status,
|
||||
_maintain_client_status: maintain_client_status,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update(&mut self, model: CloudModel, settings_version: usize) {
|
||||
self.model = model;
|
||||
self.settings_version = settings_version;
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageModelCompletionProvider for CloudCompletionProvider {
|
||||
fn available_models(&self, _cx: &AppContext) -> Vec<LanguageModel> {
|
||||
let mut custom_model = if let CloudModel::Custom(custom_model) = self.model.clone() {
|
||||
Some(custom_model)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
CloudModel::iter()
|
||||
.filter_map(move |model| {
|
||||
if let CloudModel::Custom(_) = model {
|
||||
Some(CloudModel::Custom(custom_model.take()?))
|
||||
} else {
|
||||
Some(model)
|
||||
}
|
||||
})
|
||||
.map(LanguageModel::Cloud)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn settings_version(&self) -> usize {
|
||||
self.settings_version
|
||||
}
|
||||
|
||||
fn is_authenticated(&self) -> bool {
|
||||
self.status.is_connected()
|
||||
}
|
||||
|
||||
fn authenticate(&self, cx: &AppContext) -> Task<Result<()>> {
|
||||
let client = self.client.clone();
|
||||
cx.spawn(move |cx| async move { client.authenticate_and_connect(true, &cx).await })
|
||||
}
|
||||
|
||||
fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView {
|
||||
cx.new_view(|_cx| AuthenticationPrompt).into()
|
||||
}
|
||||
|
||||
fn reset_credentials(&self, _cx: &AppContext) -> Task<Result<()>> {
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
|
||||
fn model(&self) -> LanguageModel {
|
||||
LanguageModel::Cloud(self.model.clone())
|
||||
}
|
||||
|
||||
fn count_tokens(
|
||||
&self,
|
||||
request: LanguageModelRequest,
|
||||
cx: &AppContext,
|
||||
) -> BoxFuture<'static, Result<usize>> {
|
||||
match request.model {
|
||||
LanguageModel::Cloud(CloudModel::Gpt4)
|
||||
| LanguageModel::Cloud(CloudModel::Gpt4Turbo)
|
||||
| LanguageModel::Cloud(CloudModel::Gpt4Omni)
|
||||
| LanguageModel::Cloud(CloudModel::Gpt3Point5Turbo) => {
|
||||
count_open_ai_tokens(request, cx.background_executor())
|
||||
}
|
||||
LanguageModel::Cloud(
|
||||
CloudModel::Claude3_5Sonnet
|
||||
| CloudModel::Claude3Opus
|
||||
| CloudModel::Claude3Sonnet
|
||||
| CloudModel::Claude3Haiku,
|
||||
) => {
|
||||
// Can't find a tokenizer for Claude 3, so for now just use the same as OpenAI's as an approximation.
|
||||
count_open_ai_tokens(request, cx.background_executor())
|
||||
}
|
||||
LanguageModel::Cloud(CloudModel::Custom(model)) => {
|
||||
let request = self.client.request(proto::CountTokensWithLanguageModel {
|
||||
model,
|
||||
messages: request
|
||||
.messages
|
||||
.iter()
|
||||
.map(|message| message.to_proto())
|
||||
.collect(),
|
||||
});
|
||||
async move {
|
||||
let response = request.await?;
|
||||
Ok(response.token_count as usize)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
_ => future::ready(Err(anyhow!("invalid model"))).boxed(),
|
||||
}
|
||||
}
|
||||
|
||||
fn complete(
|
||||
&self,
|
||||
mut request: LanguageModelRequest,
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
|
||||
request.preprocess();
|
||||
|
||||
let request = proto::CompleteWithLanguageModel {
|
||||
model: request.model.id().to_string(),
|
||||
messages: request
|
||||
.messages
|
||||
.iter()
|
||||
.map(|message| message.to_proto())
|
||||
.collect(),
|
||||
stop: request.stop,
|
||||
temperature: request.temperature,
|
||||
tools: Vec::new(),
|
||||
tool_choice: None,
|
||||
};
|
||||
|
||||
self.client
|
||||
.request_stream(request)
|
||||
.map_ok(|stream| {
|
||||
stream
|
||||
.filter_map(|response| async move {
|
||||
match response {
|
||||
Ok(mut response) => Some(Ok(response.choices.pop()?.delta?.content?)),
|
||||
Err(error) => Some(Err(error)),
|
||||
}
|
||||
})
|
||||
.boxed()
|
||||
})
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
struct AuthenticationPrompt;
|
||||
|
||||
impl Render for AuthenticationPrompt {
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
const LABEL: &str = "Generate and analyze code with language models. You can dialog with the assistant in this panel or transform code inline.";
|
||||
|
||||
v_flex().gap_6().p_4().child(Label::new(LABEL)).child(
|
||||
v_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
Button::new("sign_in", "Sign in")
|
||||
.icon_color(Color::Muted)
|
||||
.icon(IconName::Github)
|
||||
.icon_position(IconPosition::Start)
|
||||
.style(ButtonStyle::Filled)
|
||||
.full_width()
|
||||
.on_click(|_, cx| {
|
||||
CompletionProvider::global(cx)
|
||||
.authenticate(cx)
|
||||
.detach_and_log_err(cx);
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
div().flex().w_full().items_center().child(
|
||||
Label::new("Sign in to enable collaboration.")
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,106 +0,0 @@
use anyhow::Result;
use collections::HashMap;
use futures::{channel::mpsc, future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
use gpui::{AnyView, AppContext, Task};
use std::sync::Arc;
use ui::WindowContext;

use crate::{LanguageModel, LanguageModelCompletionProvider, LanguageModelRequest};

#[derive(Clone, Default)]
pub struct FakeCompletionProvider {
    current_completion_txs: Arc<parking_lot::Mutex<HashMap<String, mpsc::UnboundedSender<String>>>>,
}

impl FakeCompletionProvider {
    pub fn setup_test(cx: &mut AppContext) -> Self {
        use crate::CompletionProvider;
        use parking_lot::RwLock;

        let this = Self::default();
        let provider = CompletionProvider::new(Arc::new(RwLock::new(this.clone())), None);
        cx.set_global(provider);
        this
    }

    pub fn running_completions(&self) -> Vec<LanguageModelRequest> {
        self.current_completion_txs
            .lock()
            .keys()
            .map(|k| serde_json::from_str(k).unwrap())
            .collect()
    }

    pub fn completion_count(&self) -> usize {
        self.current_completion_txs.lock().len()
    }

    pub fn send_completion(&self, request: &LanguageModelRequest, chunk: String) {
        let json = serde_json::to_string(request).unwrap();
        self.current_completion_txs
            .lock()
            .get(&json)
            .unwrap()
            .unbounded_send(chunk)
            .unwrap();
    }

    pub fn finish_completion(&self, request: &LanguageModelRequest) {
        self.current_completion_txs
            .lock()
            .remove(&serde_json::to_string(request).unwrap());
    }
}

impl LanguageModelCompletionProvider for FakeCompletionProvider {
    fn available_models(&self, _cx: &AppContext) -> Vec<LanguageModel> {
        vec![LanguageModel::default()]
    }

    fn settings_version(&self) -> usize {
        0
    }

    fn is_authenticated(&self) -> bool {
        true
    }

    fn authenticate(&self, _cx: &AppContext) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }

    fn authentication_prompt(&self, _cx: &mut WindowContext) -> AnyView {
        unimplemented!()
    }

    fn reset_credentials(&self, _cx: &AppContext) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }

    fn model(&self) -> LanguageModel {
        LanguageModel::default()
    }

    fn count_tokens(
        &self,
        _request: LanguageModelRequest,
        _cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        futures::future::ready(Ok(0)).boxed()
    }

    fn complete(
        &self,
        _request: LanguageModelRequest,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        let (tx, rx) = mpsc::unbounded();
        self.current_completion_txs
            .lock()
            .insert(serde_json::to_string(&_request).unwrap(), tx);
        async move { Ok(rx.map(Ok).boxed()) }.boxed()
    }

    fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
        self
    }
}
@@ -1,7 +1,6 @@
|
||||
use crate::{
|
||||
assistant_settings::AssistantSettings, humanize_token_count, prompts::generate_content_prompt,
|
||||
AssistantPanel, AssistantPanelEvent, CompletionProvider, Hunk, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, Role, StreamingDiff,
|
||||
humanize_token_count, prompts::generate_content_prompt, AssistantPanel, AssistantPanelEvent,
|
||||
Hunk, LanguageModelCompletionProvider, ModelSelector, StreamingDiff,
|
||||
};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use client::telemetry::Telemetry;
|
||||
@@ -9,27 +8,36 @@ use collections::{hash_map, HashMap, HashSet, VecDeque};
|
||||
use editor::{
|
||||
actions::{MoveDown, MoveUp, SelectAll},
|
||||
display_map::{
|
||||
BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock,
|
||||
BlockContext, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
|
||||
ToDisplayPoint,
|
||||
},
|
||||
Anchor, AnchorRangeExt, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle,
|
||||
ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint,
|
||||
};
|
||||
use fs::Fs;
|
||||
use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
|
||||
use gpui::{
|
||||
point, AppContext, EventEmitter, FocusHandle, FocusableView, FontStyle, Global, HighlightStyle,
|
||||
Model, ModelContext, Subscription, Task, TextStyle, UpdateGlobal, View, ViewContext, WeakView,
|
||||
WhiteSpace, WindowContext,
|
||||
use futures::{
|
||||
channel::mpsc,
|
||||
future::LocalBoxFuture,
|
||||
stream::{self, BoxStream},
|
||||
SinkExt, Stream, StreamExt,
|
||||
};
|
||||
use language::{Buffer, Point, Selection, TransactionId};
|
||||
use gpui::{
|
||||
point, AppContext, EventEmitter, FocusHandle, FocusableView, Global, HighlightStyle, Model,
|
||||
ModelContext, Subscription, Task, TextStyle, UpdateGlobal, View, ViewContext, WeakView,
|
||||
WindowContext,
|
||||
};
|
||||
use language::{Buffer, IndentKind, Point, Selection, TransactionId};
|
||||
use language_model::{LanguageModelRequest, LanguageModelRequestMessage, Role};
|
||||
use multi_buffer::MultiBufferRow;
|
||||
use parking_lot::Mutex;
|
||||
use rope::Rope;
|
||||
use settings::{update_settings_file, Settings};
|
||||
use settings::Settings;
|
||||
use similar::TextDiff;
|
||||
use smol::future::FutureExt;
|
||||
use std::{
|
||||
cmp, mem,
|
||||
cmp,
|
||||
future::Future,
|
||||
mem,
|
||||
ops::{Range, RangeInclusive},
|
||||
pin::Pin,
|
||||
sync::Arc,
|
||||
@@ -37,7 +45,7 @@ use std::{
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use theme::ThemeSettings;
|
||||
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};
|
||||
use ui::{prelude::*, IconButtonShape, Tooltip};
|
||||
use util::RangeExt;
|
||||
use workspace::{notifications::NotificationId, Toast, Workspace};
|
||||
|
||||
@@ -79,6 +87,7 @@ impl InlineAssistant {
|
||||
editor: &View<Editor>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
assistant_panel: Option<&View<AssistantPanel>>,
|
||||
initial_prompt: Option<String>,
|
||||
cx: &mut WindowContext,
|
||||
) {
|
||||
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||
@@ -130,11 +139,11 @@ impl InlineAssistant {
|
||||
}
|
||||
|
||||
let assist_group_id = self.next_assist_group_id.post_inc();
|
||||
let prompt_buffer = cx.new_model(|cx| Buffer::local("", cx));
|
||||
let prompt_buffer =
|
||||
cx.new_model(|cx| Buffer::local(initial_prompt.unwrap_or_default(), cx));
|
||||
let prompt_buffer = cx.new_model(|cx| MultiBuffer::singleton(prompt_buffer, cx));
|
||||
|
||||
let mut assists = Vec::new();
|
||||
let mut assist_blocks = Vec::new();
|
||||
let mut assist_to_focus = None;
|
||||
for range in codegen_ranges {
|
||||
let assist_id = self.next_assist_id.post_inc();
|
||||
@@ -142,6 +151,7 @@ impl InlineAssistant {
|
||||
Codegen::new(
|
||||
editor.read(cx).buffer().clone(),
|
||||
range.clone(),
|
||||
None,
|
||||
self.telemetry.clone(),
|
||||
cx,
|
||||
)
|
||||
@@ -174,42 +184,18 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
|
||||
assist_blocks.push(BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
position: range.start,
|
||||
height: prompt_editor.read(cx).height_in_lines,
|
||||
render: build_assist_editor_renderer(&prompt_editor),
|
||||
disposition: BlockDisposition::Above,
|
||||
});
|
||||
assist_blocks.push(BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
position: range.end,
|
||||
height: 1,
|
||||
render: Box::new(|cx| {
|
||||
v_flex()
|
||||
.h_full()
|
||||
.w_full()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().status().info_border)
|
||||
.into_any_element()
|
||||
}),
|
||||
disposition: BlockDisposition::Below,
|
||||
});
|
||||
assists.push((assist_id, prompt_editor));
|
||||
}
|
||||
let [prompt_block_id, end_block_id] =
|
||||
self.insert_assist_blocks(editor, &range, &prompt_editor, cx);
|
||||
|
||||
let assist_block_ids = editor.update(cx, |editor, cx| {
|
||||
editor.insert_blocks(assist_blocks, None, cx)
|
||||
});
|
||||
assists.push((assist_id, prompt_editor, prompt_block_id, end_block_id));
|
||||
}
|
||||
|
||||
let editor_assists = self
|
||||
.assists_by_editor
|
||||
.entry(editor.downgrade())
|
||||
.or_insert_with(|| EditorInlineAssists::new(&editor, cx));
|
||||
let mut assist_group = InlineAssistGroup::new();
|
||||
for ((assist_id, prompt_editor), block_ids) in
|
||||
assists.into_iter().zip(assist_block_ids.chunks_exact(2))
|
||||
{
|
||||
for (assist_id, prompt_editor, prompt_block_id, end_block_id) in assists {
|
||||
self.assists.insert(
|
||||
assist_id,
|
||||
InlineAssist::new(
|
||||
@@ -218,8 +204,8 @@ impl InlineAssistant {
|
||||
assistant_panel.is_some(),
|
||||
editor,
|
||||
&prompt_editor,
|
||||
block_ids[0],
|
||||
block_ids[1],
|
||||
prompt_block_id,
|
||||
end_block_id,
|
||||
prompt_editor.read(cx).codegen.clone(),
|
||||
workspace.clone(),
|
||||
cx,
|
||||
@@ -235,6 +221,128 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn suggest_assist(
|
||||
&mut self,
|
||||
editor: &View<Editor>,
|
||||
mut range: Range<Anchor>,
|
||||
initial_prompt: String,
|
||||
initial_insertion: Option<String>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
assistant_panel: Option<&View<AssistantPanel>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> InlineAssistId {
|
||||
let assist_group_id = self.next_assist_group_id.post_inc();
|
||||
let prompt_buffer = cx.new_model(|cx| Buffer::local(&initial_prompt, cx));
|
||||
let prompt_buffer = cx.new_model(|cx| MultiBuffer::singleton(prompt_buffer, cx));
|
||||
|
||||
let assist_id = self.next_assist_id.post_inc();
|
||||
|
||||
let buffer = editor.read(cx).buffer().clone();
|
||||
let prepend_transaction_id = initial_insertion.and_then(|initial_insertion| {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.start_transaction(cx);
|
||||
buffer.edit([(range.start..range.start, initial_insertion)], None, cx);
|
||||
buffer.end_transaction(cx)
|
||||
})
|
||||
});
|
||||
|
||||
range.start = range.start.bias_left(&buffer.read(cx).read(cx));
|
||||
range.end = range.end.bias_right(&buffer.read(cx).read(cx));
|
||||
|
||||
let codegen = cx.new_model(|cx| {
|
||||
Codegen::new(
|
||||
editor.read(cx).buffer().clone(),
|
||||
range.clone(),
|
||||
prepend_transaction_id,
|
||||
self.telemetry.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let gutter_dimensions = Arc::new(Mutex::new(GutterDimensions::default()));
|
||||
let prompt_editor = cx.new_view(|cx| {
|
||||
PromptEditor::new(
|
||||
assist_id,
|
||||
gutter_dimensions.clone(),
|
||||
self.prompt_history.clone(),
|
||||
prompt_buffer.clone(),
|
||||
codegen.clone(),
|
||||
editor,
|
||||
assistant_panel,
|
||||
workspace.clone(),
|
||||
self.fs.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let [prompt_block_id, end_block_id] =
|
||||
self.insert_assist_blocks(editor, &range, &prompt_editor, cx);
|
||||
|
||||
let editor_assists = self
|
||||
.assists_by_editor
|
||||
.entry(editor.downgrade())
|
||||
.or_insert_with(|| EditorInlineAssists::new(&editor, cx));
|
||||
|
||||
let mut assist_group = InlineAssistGroup::new();
|
||||
self.assists.insert(
|
||||
assist_id,
|
||||
InlineAssist::new(
|
||||
assist_id,
|
||||
assist_group_id,
|
||||
assistant_panel.is_some(),
|
||||
editor,
|
||||
&prompt_editor,
|
||||
prompt_block_id,
|
||||
end_block_id,
|
||||
prompt_editor.read(cx).codegen.clone(),
|
||||
workspace.clone(),
|
||||
cx,
|
||||
),
|
||||
);
|
||||
assist_group.assist_ids.push(assist_id);
|
||||
editor_assists.assist_ids.push(assist_id);
|
||||
self.assist_groups.insert(assist_group_id, assist_group);
|
||||
assist_id
|
||||
}
|
||||
|
||||
fn insert_assist_blocks(
|
||||
&self,
|
||||
editor: &View<Editor>,
|
||||
range: &Range<Anchor>,
|
||||
prompt_editor: &View<PromptEditor>,
|
||||
cx: &mut WindowContext,
|
||||
) -> [CustomBlockId; 2] {
|
||||
let assist_blocks = vec![
|
||||
BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
position: range.start,
|
||||
height: prompt_editor.read(cx).height_in_lines,
|
||||
render: build_assist_editor_renderer(prompt_editor),
|
||||
disposition: BlockDisposition::Above,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
position: range.end,
|
||||
height: 1,
|
||||
render: Box::new(|cx| {
|
||||
v_flex()
|
||||
.h_full()
|
||||
.w_full()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().status().info_border)
|
||||
.into_any_element()
|
||||
}),
|
||||
disposition: BlockDisposition::Below,
|
||||
},
|
||||
];
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
let block_ids = editor.insert_blocks(assist_blocks, None, cx);
|
||||
[block_ids[0], block_ids[1]]
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_prompt_editor_focus_in(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
let assist = &self.assists[&assist_id];
|
||||
let Some(decorations) = assist.decorations.as_ref() else {
|
||||
@@ -379,6 +487,14 @@ impl InlineAssistant {
|
||||
cx.propagate();
|
||||
}
|
||||
|
||||
fn handle_editor_release(&mut self, editor: WeakView<Editor>, cx: &mut WindowContext) {
|
||||
if let Some(editor_assists) = self.assists_by_editor.get_mut(&editor) {
|
||||
for assist_id in editor_assists.assist_ids.clone() {
|
||||
self.finish_assist(assist_id, true, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_editor_change(&mut self, editor: View<Editor>, cx: &mut WindowContext) {
|
||||
let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) else {
|
||||
return;
|
||||
@@ -698,7 +814,7 @@ impl InlineAssistant {
|
||||
assist_group.assist_ids.clone()
|
||||
}
|
||||
|
||||
fn start_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
pub fn start_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
let assist = if let Some(assist) = self.assists.get_mut(&assist_id) {
|
||||
assist
|
||||
} else {
|
||||
@@ -727,16 +843,32 @@ impl InlineAssistant {
|
||||
self.prompt_history.pop_front();
|
||||
}
|
||||
|
||||
assist.codegen.update(cx, |codegen, cx| codegen.undo(cx));
|
||||
let codegen = assist.codegen.clone();
|
||||
let request = self.request_for_inline_assist(assist_id, cx);
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let request = request.await?;
|
||||
codegen.update(&mut cx, |codegen, cx| codegen.start(request, cx))?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
let telemetry_id = LanguageModelCompletionProvider::read_global(cx)
|
||||
.active_model()
|
||||
.map(|m| m.telemetry_id())
|
||||
.unwrap_or_default();
|
||||
let chunks: LocalBoxFuture<Result<BoxStream<Result<String>>>> =
|
||||
if user_prompt.trim().to_lowercase() == "delete" {
|
||||
async { Ok(stream::empty().boxed()) }.boxed_local()
|
||||
} else {
|
||||
let request = self.request_for_inline_assist(assist_id, cx);
|
||||
let mut cx = cx.to_async();
|
||||
async move {
|
||||
let request = request.await?;
|
||||
let chunks = cx
|
||||
.update(|cx| {
|
||||
LanguageModelCompletionProvider::read_global(cx)
|
||||
.stream_completion(request, cx)
|
||||
})?
|
||||
.await?;
|
||||
Ok(chunks.boxed())
|
||||
}
|
||||
.boxed_local()
|
||||
};
|
||||
codegen.update(cx, |codegen, cx| {
|
||||
codegen.start(telemetry_id, chunks, cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn request_for_inline_assist(
|
||||
@@ -745,8 +877,8 @@ impl InlineAssistant {
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<LanguageModelRequest>> {
|
||||
cx.spawn(|mut cx| async move {
|
||||
let (user_prompt, context_request, project_name, buffer, range, model) = cx
|
||||
.read_global(|this: &InlineAssistant, cx: &WindowContext| {
|
||||
let (user_prompt, context_request, project_name, buffer, range) =
|
||||
cx.read_global(|this: &InlineAssistant, cx: &WindowContext| {
|
||||
let assist = this.assists.get(&assist_id).context("invalid assist")?;
|
||||
let decorations = assist.decorations.as_ref().context("invalid assist")?;
|
||||
let editor = assist.editor.upgrade().context("invalid assist")?;
|
||||
@@ -780,15 +912,7 @@ impl InlineAssistant {
|
||||
});
|
||||
let buffer = editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||
let range = assist.codegen.read(cx).range.clone();
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
anyhow::Ok((
|
||||
user_prompt,
|
||||
context_request,
|
||||
project_name,
|
||||
buffer,
|
||||
range,
|
||||
model,
|
||||
))
|
||||
anyhow::Ok((user_prompt, context_request, project_name, buffer, range))
|
||||
})??;
|
||||
|
||||
let language = buffer.language_at(range.start);
|
||||
@@ -847,7 +971,6 @@ impl InlineAssistant {
|
||||
});
|
||||
|
||||
Ok(LanguageModelRequest {
|
||||
model,
|
||||
messages,
|
||||
stop: vec!["|END|>".to_string()],
|
||||
temperature,
|
||||
@@ -855,7 +978,7 @@ impl InlineAssistant {
|
||||
})
|
||||
}
|
||||
|
||||
fn stop_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
pub fn stop_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
let assist = if let Some(assist) = self.assists.get_mut(&assist_id) {
|
||||
assist
|
||||
} else {
|
||||
@@ -1074,6 +1197,14 @@ impl EditorInlineAssists {
|
||||
}
|
||||
}),
|
||||
_subscriptions: vec![
|
||||
cx.observe_release(editor, {
|
||||
let editor = editor.downgrade();
|
||||
|_, cx| {
|
||||
InlineAssistant::update_global(cx, |this, cx| {
|
||||
this.handle_editor_release(editor, cx);
|
||||
})
|
||||
}
|
||||
}),
|
||||
cx.observe(editor, move |editor, cx| {
|
||||
InlineAssistant::update_global(cx, |this, cx| {
|
||||
this.handle_editor_change(editor, cx)
|
||||
@@ -1138,7 +1269,7 @@ fn build_assist_editor_renderer(editor: &View<PromptEditor>) -> RenderBlock {
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash)]
|
||||
struct InlineAssistId(usize);
|
||||
pub struct InlineAssistId(usize);
|
||||
|
||||
impl InlineAssistId {
|
||||
fn post_inc(&mut self) -> InlineAssistId {
|
||||
@@ -1192,22 +1323,19 @@ impl EventEmitter<PromptEditorEvent> for PromptEditor {}
|
||||
impl Render for PromptEditor {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let gutter_dimensions = *self.gutter_dimensions.lock();
|
||||
let fs = self.fs.clone();
|
||||
|
||||
let buttons = match &self.codegen.read(cx).status {
|
||||
CodegenStatus::Idle => {
|
||||
vec![
|
||||
IconButton::new("cancel", IconName::Close)
|
||||
.icon_color(Color::Muted)
|
||||
.size(ButtonSize::None)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx))
|
||||
.on_click(
|
||||
cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)),
|
||||
),
|
||||
IconButton::new("start", IconName::Sparkle)
|
||||
IconButton::new("start", IconName::SparkleAlt)
|
||||
.icon_color(Color::Muted)
|
||||
.size(ButtonSize::None)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| Tooltip::for_action("Transform", &menu::Confirm, cx))
|
||||
.on_click(
|
||||
cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StartRequested)),
|
||||
@@ -1218,15 +1346,14 @@ impl Render for PromptEditor {
|
||||
vec![
|
||||
IconButton::new("cancel", IconName::Close)
|
||||
.icon_color(Color::Muted)
|
||||
.size(ButtonSize::None)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| Tooltip::text("Cancel Assist", cx))
|
||||
.on_click(
|
||||
cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)),
|
||||
),
|
||||
IconButton::new("stop", IconName::Stop)
|
||||
.icon_color(Color::Error)
|
||||
.size(ButtonSize::None)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| {
|
||||
Tooltip::with_meta(
|
||||
"Interrupt Transformation",
|
||||
@@ -1244,7 +1371,7 @@ impl Render for PromptEditor {
|
||||
vec![
|
||||
IconButton::new("cancel", IconName::Close)
|
||||
.icon_color(Color::Muted)
|
||||
.size(ButtonSize::None)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx))
|
||||
.on_click(
|
||||
cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)),
|
||||
@@ -1252,8 +1379,7 @@ impl Render for PromptEditor {
|
||||
if self.edited_since_done {
|
||||
IconButton::new("restart", IconName::RotateCw)
|
||||
.icon_color(Color::Info)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.size(ButtonSize::None)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| {
|
||||
Tooltip::with_meta(
|
||||
"Restart Transformation",
|
||||
@@ -1268,7 +1394,7 @@ impl Render for PromptEditor {
|
||||
} else {
|
||||
IconButton::new("confirm", IconName::Check)
|
||||
.icon_color(Color::Info)
|
||||
.size(ButtonSize::None)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| Tooltip::for_action("Confirm Assist", &menu::Confirm, cx))
|
||||
.on_click(cx.listener(|_, _, cx| {
|
||||
cx.emit(PromptEditorEvent::ConfirmRequested);
|
||||
@@ -1295,57 +1421,32 @@ impl Render for PromptEditor {
|
||||
.justify_center()
|
||||
.gap_2()
|
||||
.child(
|
||||
PopoverMenu::new("model-switcher")
|
||||
.menu(move |cx| {
|
||||
ContextMenu::build(cx, |mut menu, cx| {
|
||||
for model in CompletionProvider::global(cx).available_models(cx)
|
||||
{
|
||||
menu = menu.custom_entry(
|
||||
{
|
||||
let model = model.clone();
|
||||
move |_| {
|
||||
Label::new(model.display_name())
|
||||
.into_any_element()
|
||||
}
|
||||
},
|
||||
{
|
||||
let fs = fs.clone();
|
||||
let model = model.clone();
|
||||
move |cx| {
|
||||
let model = model.clone();
|
||||
update_settings_file::<AssistantSettings>(
|
||||
fs.clone(),
|
||||
cx,
|
||||
move |settings| settings.set_model(model),
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
menu
|
||||
})
|
||||
.into()
|
||||
})
|
||||
.trigger(
|
||||
IconButton::new("context", IconName::Settings)
|
||||
.size(ButtonSize::None)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
format!(
|
||||
"Using {}",
|
||||
CompletionProvider::global(cx)
|
||||
.model()
|
||||
.display_name()
|
||||
),
|
||||
None,
|
||||
"Click to Change Model",
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
)
|
||||
.anchor(gpui::AnchorCorner::BottomRight),
|
||||
ModelSelector::new(
|
||||
self.fs.clone(),
|
||||
IconButton::new("context", IconName::SlidersAlt)
|
||||
.shape(IconButtonShape::Square)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
format!(
|
||||
"Using {}",
|
||||
LanguageModelCompletionProvider::read_global(cx)
|
||||
.active_model()
|
||||
.map(|model| model.name().0)
|
||||
.unwrap_or_else(|| "No model selected".into()),
|
||||
),
|
||||
None,
|
||||
"Change Model",
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
)
|
||||
.with_info_text(
|
||||
"Inline edits use context\n\
|
||||
from the currently selected\n\
|
||||
assistant panel tab.",
|
||||
),
|
||||
)
|
||||
.children(
|
||||
if let CodegenStatus::Error(error) = &self.codegen.read(cx).status {
|
||||
@@ -1369,7 +1470,7 @@ impl Render for PromptEditor {
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.pr_4()
|
||||
.pr_6()
|
||||
.children(self.render_token_count(cx))
|
||||
.children(buttons),
|
||||
)
|
||||
@@ -1534,13 +1635,18 @@ impl PromptEditor {
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let token_count = cx
|
||||
.update(|cx| CompletionProvider::global(cx).count_tokens(request, cx))?
|
||||
.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
if let Some(token_count) = cx.update(|cx| {
|
||||
LanguageModelCompletionProvider::read_global(cx).count_tokens(request, cx)
|
||||
})? {
|
||||
let token_count = token_count.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1663,7 +1769,7 @@ impl PromptEditor {
|
||||
}
|
||||
|
||||
fn render_token_count(&self, cx: &mut ViewContext<Self>) -> Option<impl IntoElement> {
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
let model = LanguageModelCompletionProvider::read_global(cx).active_model()?;
|
||||
let token_count = self.token_count?;
|
||||
let max_token_count = model.max_token_count();
|
||||
|
||||
@@ -1729,14 +1835,11 @@ impl PromptEditor {
|
||||
},
|
||||
font_family: settings.ui_font.family.clone(),
|
||||
font_features: settings.ui_font.features.clone(),
|
||||
font_fallbacks: settings.ui_font.fallbacks.clone(),
|
||||
font_size: rems(0.875).into(),
|
||||
font_weight: settings.ui_font.weight,
|
||||
font_style: FontStyle::Normal,
|
||||
line_height: relative(1.3),
|
||||
background_color: None,
|
||||
underline: None,
|
||||
strikethrough: None,
|
||||
white_space: WhiteSpace::Normal,
|
||||
..Default::default()
|
||||
};
|
||||
EditorElement::new(
|
||||
&self.editor,
|
||||
@@ -1768,8 +1871,8 @@ impl InlineAssist {
|
||||
include_context: bool,
|
||||
editor: &View<Editor>,
|
||||
prompt_editor: &View<PromptEditor>,
|
||||
prompt_block_id: BlockId,
|
||||
end_block_id: BlockId,
|
||||
prompt_block_id: CustomBlockId,
|
||||
end_block_id: CustomBlockId,
|
||||
codegen: Model<Codegen>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -1863,10 +1966,10 @@ impl InlineAssist {
|
||||
}
|
||||
|
||||
struct InlineAssistDecorations {
|
||||
prompt_block_id: BlockId,
|
||||
prompt_block_id: CustomBlockId,
|
||||
prompt_editor: View<PromptEditor>,
|
||||
removed_line_block_ids: HashSet<BlockId>,
|
||||
end_block_id: BlockId,
|
||||
removed_line_block_ids: HashSet<CustomBlockId>,
|
||||
end_block_id: CustomBlockId,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -1882,7 +1985,8 @@ pub struct Codegen {
|
||||
range: Range<Anchor>,
|
||||
edit_position: Anchor,
|
||||
last_equal_ranges: Vec<Range<Anchor>>,
|
||||
transaction_id: Option<TransactionId>,
|
||||
prepend_transaction_id: Option<TransactionId>,
|
||||
generation_transaction_id: Option<TransactionId>,
|
||||
status: CodegenStatus,
|
||||
generation: Task<()>,
|
||||
diff: Diff,
|
||||
@@ -1911,6 +2015,7 @@ impl Codegen {
|
||||
pub fn new(
|
||||
buffer: Model<MultiBuffer>,
|
||||
range: Range<Anchor>,
|
||||
prepend_transaction_id: Option<TransactionId>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Self {
|
||||
@@ -1943,7 +2048,8 @@ impl Codegen {
|
||||
range,
|
||||
snapshot,
|
||||
last_equal_ranges: Default::default(),
|
||||
transaction_id: Default::default(),
|
||||
prepend_transaction_id,
|
||||
generation_transaction_id: None,
|
||||
status: CodegenStatus::Idle,
|
||||
generation: Task::ready(()),
|
||||
diff: Diff::default(),
|
||||
@@ -1959,8 +2065,13 @@ impl Codegen {
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
if let multi_buffer::Event::TransactionUndone { transaction_id } = event {
|
||||
if self.transaction_id == Some(*transaction_id) {
|
||||
self.transaction_id = None;
|
||||
if self.generation_transaction_id == Some(*transaction_id) {
|
||||
self.generation_transaction_id = None;
|
||||
self.generation = Task::ready(());
|
||||
cx.emit(CodegenEvent::Undone);
|
||||
} else if self.prepend_transaction_id == Some(*transaction_id) {
|
||||
self.prepend_transaction_id = None;
|
||||
self.generation_transaction_id = None;
|
||||
self.generation = Task::ready(());
|
||||
cx.emit(CodegenEvent::Undone);
|
||||
}
|
||||
@@ -1971,7 +2082,12 @@ impl Codegen {
|
||||
&self.last_equal_ranges
|
||||
}
|
||||
|
||||
pub fn start(&mut self, prompt: LanguageModelRequest, cx: &mut ModelContext<Self>) {
|
||||
pub fn start(
|
||||
&mut self,
|
||||
telemetry_id: String,
|
||||
stream: impl 'static + Future<Output = Result<BoxStream<'static, Result<String>>>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
let range = self.range.clone();
|
||||
let snapshot = self.snapshot.clone();
|
||||
let selected_text = snapshot
|
||||
@@ -1979,21 +2095,37 @@ impl Codegen {
|
||||
.collect::<Rope>();
|
||||
|
||||
let selection_start = range.start.to_point(&snapshot);
|
||||
let suggested_line_indent = snapshot
|
||||
.suggested_indents(selection_start.row..selection_start.row + 1, cx)
|
||||
|
||||
// Start with the indentation of the first line in the selection
|
||||
let mut suggested_line_indent = snapshot
|
||||
.suggested_indents(selection_start.row..=selection_start.row, cx)
|
||||
.into_values()
|
||||
.next()
|
||||
.unwrap_or_else(|| snapshot.indent_size_for_line(MultiBufferRow(selection_start.row)));
|
||||
|
||||
let model_telemetry_id = prompt.model.telemetry_id();
|
||||
let response = CompletionProvider::global(cx).complete(prompt, cx);
|
||||
// If the first line in the selection does not have indentation, check the following lines
|
||||
if suggested_line_indent.len == 0 && suggested_line_indent.kind == IndentKind::Space {
|
||||
for row in selection_start.row..=range.end.to_point(&snapshot).row {
|
||||
let line_indent = snapshot.indent_size_for_line(MultiBufferRow(row));
|
||||
// Prefer tabs if a line in the selection uses tabs as indentation
|
||||
if line_indent.kind == IndentKind::Tab {
|
||||
suggested_line_indent.kind = IndentKind::Tab;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let telemetry = self.telemetry.clone();
|
||||
self.edit_position = range.start;
|
||||
self.diff = Diff::default();
|
||||
self.status = CodegenStatus::Pending;
|
||||
if let Some(transaction_id) = self.generation_transaction_id.take() {
|
||||
self.buffer
|
||||
.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx));
|
||||
}
|
||||
self.generation = cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
let response = response.await;
|
||||
let chunks = stream.await;
|
||||
let generate = async {
|
||||
let mut edit_start = range.start.to_offset(&snapshot);
|
||||
|
||||
@@ -2003,7 +2135,7 @@ impl Codegen {
|
||||
let mut response_latency = None;
|
||||
let request_start = Instant::now();
|
||||
let diff = async {
|
||||
let chunks = StripInvalidSpans::new(response.inner.await?);
|
||||
let chunks = StripInvalidSpans::new(chunks?);
|
||||
futures::pin_mut!(chunks);
|
||||
let mut diff = StreamingDiff::new(selected_text.to_string());
|
||||
|
||||
@@ -2086,7 +2218,7 @@ impl Codegen {
|
||||
telemetry.report_assistant_event(
|
||||
None,
|
||||
telemetry_events::AssistantKind::Inline,
|
||||
model_telemetry_id,
|
||||
telemetry_id,
|
||||
response_latency,
|
||||
error_message,
|
||||
);
|
||||
@@ -2136,7 +2268,7 @@ impl Codegen {
|
||||
});
|
||||
|
||||
if let Some(transaction) = transaction {
|
||||
if let Some(first_transaction) = this.transaction_id {
|
||||
if let Some(first_transaction) = this.generation_transaction_id {
|
||||
// Group all assistant edits into the first transaction.
|
||||
this.buffer.update(cx, |buffer, cx| {
|
||||
buffer.merge_transactions(
|
||||
@@ -2146,7 +2278,7 @@ impl Codegen {
|
||||
)
|
||||
});
|
||||
} else {
|
||||
this.transaction_id = Some(transaction);
|
||||
this.generation_transaction_id = Some(transaction);
|
||||
this.buffer.update(cx, |buffer, cx| {
|
||||
buffer.finalize_last_transaction(cx)
|
||||
});
|
||||
@@ -2189,7 +2321,12 @@ impl Codegen {
|
||||
}
|
||||
|
||||
pub fn undo(&mut self, cx: &mut ModelContext<Self>) {
|
||||
if let Some(transaction_id) = self.transaction_id.take() {
|
||||
if let Some(transaction_id) = self.prepend_transaction_id.take() {
|
||||
self.buffer
|
||||
.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx));
|
||||
}
|
||||
|
||||
if let Some(transaction_id) = self.generation_transaction_id.take() {
|
||||
self.buffer
|
||||
.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx));
|
||||
}
|
||||
@@ -2451,10 +2588,6 @@ fn merge_ranges(ranges: &mut Vec<Range<Anchor>>, buffer: &MultiBufferSnapshot) {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::FakeCompletionProvider;
|
||||
|
||||
use super::*;
|
||||
use futures::stream::{self};
|
||||
use gpui::{Context, TestAppContext};
|
||||
@@ -2463,9 +2596,11 @@ mod tests {
|
||||
language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, LanguageMatcher,
|
||||
Point,
|
||||
};
|
||||
use language_model::LanguageModelRegistry;
|
||||
use rand::prelude::*;
|
||||
use serde::Serialize;
|
||||
use settings::SettingsStore;
|
||||
use std::{future, sync::Arc};
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct DummyCompletionRequest {
|
||||
@@ -2475,7 +2610,8 @@ mod tests {
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_transform_autoindent(cx: &mut TestAppContext, mut rng: StdRng) {
|
||||
cx.set_global(cx.update(SettingsStore::test));
|
||||
let provider = cx.update(|cx| FakeCompletionProvider::setup_test(cx));
|
||||
cx.update(language_model::LanguageModelRegistry::test);
|
||||
cx.update(completion::LanguageModelCompletionProvider::test);
|
||||
cx.update(language_settings::init);
|
||||
|
||||
let text = indoc! {"
|
||||
@@ -2493,14 +2629,17 @@ mod tests {
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(4, 5))
|
||||
});
|
||||
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, cx));
|
||||
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, None, cx));
|
||||
|
||||
let (chunks_tx, chunks_rx) = mpsc::unbounded();
|
||||
codegen.update(cx, |codegen, cx| {
|
||||
codegen.start(LanguageModelRequest::default(), cx)
|
||||
codegen.start(
|
||||
String::new(),
|
||||
future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
let mut new_text = concat!(
|
||||
" let mut x = 0;\n",
|
||||
" while x < 10 {\n",
|
||||
@@ -2511,11 +2650,11 @@ mod tests {
|
||||
let max_len = cmp::min(new_text.len(), 10);
|
||||
let len = rng.gen_range(1..=max_len);
|
||||
let (chunk, suffix) = new_text.split_at(len);
|
||||
provider.send_completion(&LanguageModelRequest::default(), chunk.into());
|
||||
chunks_tx.unbounded_send(chunk.to_string()).unwrap();
|
||||
new_text = suffix;
|
||||
cx.background_executor.run_until_parked();
|
||||
}
|
||||
provider.finish_completion(&LanguageModelRequest::default());
|
||||
drop(chunks_tx);
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
assert_eq!(
|
||||
@@ -2536,7 +2675,6 @@ mod tests {
|
||||
cx: &mut TestAppContext,
|
||||
mut rng: StdRng,
|
||||
) {
|
||||
let provider = cx.update(|cx| FakeCompletionProvider::setup_test(cx));
|
||||
cx.set_global(cx.update(SettingsStore::test));
|
||||
cx.update(language_settings::init);
|
||||
|
||||
@@ -2552,10 +2690,16 @@ mod tests {
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
snapshot.anchor_before(Point::new(1, 6))..snapshot.anchor_after(Point::new(1, 6))
|
||||
});
|
||||
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, cx));
|
||||
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, None, cx));
|
||||
|
||||
let request = LanguageModelRequest::default();
|
||||
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
|
||||
let (chunks_tx, chunks_rx) = mpsc::unbounded();
|
||||
codegen.update(cx, |codegen, cx| {
|
||||
codegen.start(
|
||||
String::new(),
|
||||
future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
@@ -2569,11 +2713,11 @@ mod tests {
|
||||
let max_len = cmp::min(new_text.len(), 10);
|
||||
let len = rng.gen_range(1..=max_len);
|
||||
let (chunk, suffix) = new_text.split_at(len);
|
||||
provider.send_completion(&LanguageModelRequest::default(), chunk.into());
|
||||
chunks_tx.unbounded_send(chunk.to_string()).unwrap();
|
||||
new_text = suffix;
|
||||
cx.background_executor.run_until_parked();
|
||||
}
|
||||
provider.finish_completion(&LanguageModelRequest::default());
|
||||
drop(chunks_tx);
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
assert_eq!(
|
||||
@@ -2594,7 +2738,8 @@ mod tests {
|
||||
cx: &mut TestAppContext,
|
||||
mut rng: StdRng,
|
||||
) {
|
||||
let provider = cx.update(|cx| FakeCompletionProvider::setup_test(cx));
|
||||
cx.update(LanguageModelRegistry::test);
|
||||
cx.update(completion::LanguageModelCompletionProvider::test);
|
||||
cx.set_global(cx.update(SettingsStore::test));
|
||||
cx.update(language_settings::init);
|
||||
|
||||
@@ -2610,10 +2755,16 @@ mod tests {
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
snapshot.anchor_before(Point::new(1, 2))..snapshot.anchor_after(Point::new(1, 2))
|
||||
});
|
||||
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, cx));
|
||||
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, None, cx));
|
||||
|
||||
let request = LanguageModelRequest::default();
|
||||
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
|
||||
let (chunks_tx, chunks_rx) = mpsc::unbounded();
|
||||
codegen.update(cx, |codegen, cx| {
|
||||
codegen.start(
|
||||
String::new(),
|
||||
future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
@@ -2627,11 +2778,11 @@ mod tests {
|
||||
let max_len = cmp::min(new_text.len(), 10);
|
||||
let len = rng.gen_range(1..=max_len);
|
||||
let (chunk, suffix) = new_text.split_at(len);
|
||||
provider.send_completion(&LanguageModelRequest::default(), chunk.into());
|
||||
chunks_tx.unbounded_send(chunk.to_string()).unwrap();
|
||||
new_text = suffix;
|
||||
cx.background_executor.run_until_parked();
|
||||
}
|
||||
provider.finish_completion(&LanguageModelRequest::default());
|
||||
drop(chunks_tx);
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
assert_eq!(
|
||||
@@ -2647,6 +2798,62 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_autoindent_respects_tabs_in_selection(cx: &mut TestAppContext) {
|
||||
cx.update(LanguageModelRegistry::test);
|
||||
cx.update(completion::LanguageModelCompletionProvider::test);
|
||||
cx.set_global(cx.update(SettingsStore::test));
|
||||
cx.update(language_settings::init);
|
||||
|
||||
let text = indoc! {"
|
||||
func main() {
|
||||
\tx := 0
|
||||
\tfor i := 0; i < 10; i++ {
|
||||
\t\tx++
|
||||
\t}
|
||||
}
|
||||
"};
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let range = buffer.read_with(cx, |buffer, cx| {
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
snapshot.anchor_before(Point::new(0, 0))..snapshot.anchor_after(Point::new(4, 2))
|
||||
});
|
||||
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, None, cx));
|
||||
|
||||
let (chunks_tx, chunks_rx) = mpsc::unbounded();
|
||||
codegen.update(cx, |codegen, cx| {
|
||||
codegen.start(
|
||||
String::new(),
|
||||
future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let new_text = concat!(
|
||||
"func main() {\n",
|
||||
"\tx := 0\n",
|
||||
"\tfor x < 10 {\n",
|
||||
"\t\tx++\n",
|
||||
"\t}", //
|
||||
);
|
||||
chunks_tx.unbounded_send(new_text.to_string()).unwrap();
|
||||
drop(chunks_tx);
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
|
||||
indoc! {"
|
||||
func main() {
|
||||
\tx := 0
|
||||
\tfor x < 10 {
|
||||
\t\tx++
|
||||
\t}
|
||||
}
|
||||
"}
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_strip_invalid_spans_from_codeblock() {
|
||||
assert_chunks("Lorem ipsum dolor", "Lorem ipsum dolor").await;
|
||||
|
||||
@@ -1,82 +1,148 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::{assistant_settings::AssistantSettings, CompletionProvider, ToggleModelSelector};
|
||||
use crate::{assistant_settings::AssistantSettings, LanguageModelCompletionProvider};
|
||||
use fs::Fs;
|
||||
use gpui::SharedString;
|
||||
use language_model::LanguageModelRegistry;
|
||||
use settings::update_settings_file;
|
||||
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip};
|
||||
use ui::{prelude::*, ContextMenu, PopoverMenu, PopoverMenuHandle, PopoverTrigger};
|
||||
|
||||
#[derive(IntoElement)]
|
||||
pub struct ModelSelector {
|
||||
handle: PopoverMenuHandle<ContextMenu>,
|
||||
pub struct ModelSelector<T: PopoverTrigger> {
|
||||
handle: Option<PopoverMenuHandle<ContextMenu>>,
|
||||
fs: Arc<dyn Fs>,
|
||||
trigger: T,
|
||||
info_text: Option<SharedString>,
|
||||
}
|
||||
|
||||
impl ModelSelector {
|
||||
pub fn new(handle: PopoverMenuHandle<ContextMenu>, fs: Arc<dyn Fs>) -> Self {
|
||||
ModelSelector { handle, fs }
|
||||
impl<T: PopoverTrigger> ModelSelector<T> {
|
||||
pub fn new(fs: Arc<dyn Fs>, trigger: T) -> Self {
|
||||
ModelSelector {
|
||||
handle: None,
|
||||
fs,
|
||||
trigger,
|
||||
info_text: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_handle(mut self, handle: PopoverMenuHandle<ContextMenu>) -> Self {
|
||||
self.handle = Some(handle);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_info_text(mut self, text: impl Into<SharedString>) -> Self {
|
||||
self.info_text = Some(text.into());
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl RenderOnce for ModelSelector {
|
||||
fn render(self, cx: &mut WindowContext) -> impl IntoElement {
|
||||
PopoverMenu::new("model-switcher")
|
||||
.with_handle(self.handle)
|
||||
.menu(move |cx| {
|
||||
ContextMenu::build(cx, |mut menu, cx| {
|
||||
for model in CompletionProvider::global(cx).available_models(cx) {
|
||||
impl<T: PopoverTrigger> RenderOnce for ModelSelector<T> {
|
||||
fn render(self, _: &mut WindowContext) -> impl IntoElement {
|
||||
let mut menu = PopoverMenu::new("model-switcher");
|
||||
if let Some(handle) = self.handle {
|
||||
menu = menu.with_handle(handle);
|
||||
}
|
||||
|
||||
let info_text = self.info_text.clone();
|
||||
|
||||
menu.menu(move |cx| {
|
||||
ContextMenu::build(cx, |mut menu, cx| {
|
||||
if let Some(info_text) = info_text.clone() {
|
||||
menu = menu
|
||||
.custom_row(move |_cx| {
|
||||
Label::new(info_text.clone())
|
||||
.color(Color::Muted)
|
||||
.into_any_element()
|
||||
})
|
||||
.separator();
|
||||
}
|
||||
|
||||
for (index, provider) in LanguageModelRegistry::global(cx)
|
||||
.read(cx)
|
||||
.providers()
|
||||
.enumerate()
|
||||
{
|
||||
if index > 0 {
|
||||
menu = menu.separator();
|
||||
}
|
||||
menu = menu.header(provider.name().0);
|
||||
|
||||
let available_models = provider.provided_models(cx);
|
||||
if available_models.is_empty() {
|
||||
menu = menu.custom_entry(
|
||||
{
|
||||
let model = model.clone();
|
||||
move |_| Label::new(model.display_name()).into_any_element()
|
||||
move |_| {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.gap_1()
|
||||
.child(Icon::new(IconName::Settings))
|
||||
.child(Label::new("Configure"))
|
||||
.into_any()
|
||||
}
|
||||
},
|
||||
{
|
||||
let fs = self.fs.clone();
|
||||
let model = model.clone();
|
||||
let provider = provider.id();
|
||||
move |cx| {
|
||||
let model = model.clone();
|
||||
update_settings_file::<AssistantSettings>(
|
||||
fs.clone(),
|
||||
LanguageModelCompletionProvider::global(cx).update(
|
||||
cx,
|
||||
move |settings| settings.set_model(model),
|
||||
|completion_provider, cx| {
|
||||
completion_provider
|
||||
.set_active_provider(provider.clone(), cx)
|
||||
},
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
menu
|
||||
})
|
||||
.into()
|
||||
})
|
||||
.trigger(
|
||||
ButtonLike::new("active-model")
|
||||
.style(ButtonStyle::Subtle)
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
div()
|
||||
.overflow_x_hidden()
|
||||
.flex_grow()
|
||||
.whitespace_nowrap()
|
||||
.child(
|
||||
Label::new(
|
||||
CompletionProvider::global(cx).model().display_name(),
|
||||
|
||||
let selected_model = LanguageModelCompletionProvider::read_global(cx)
|
||||
.active_model()
|
||||
.map(|m| m.id());
|
||||
let selected_provider = LanguageModelCompletionProvider::read_global(cx)
|
||||
.active_provider()
|
||||
.map(|m| m.id());
|
||||
|
||||
for available_model in available_models {
|
||||
menu = menu.custom_entry(
|
||||
{
|
||||
let id = available_model.id();
|
||||
let provider_id = available_model.provider_id();
|
||||
let model_name = available_model.name().0.clone();
|
||||
let selected_model = selected_model.clone();
|
||||
let selected_provider = selected_provider.clone();
|
||||
move |_| {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(Label::new(model_name.clone()))
|
||||
.when(
|
||||
selected_model.as_ref() == Some(&id)
|
||||
&& selected_provider.as_ref() == Some(&provider_id),
|
||||
|this| this.child(Icon::new(IconName::Check)),
|
||||
)
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
Icon::new(IconName::ChevronDown)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::XSmall),
|
||||
),
|
||||
)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::for_action("Change Model", &ToggleModelSelector, cx)
|
||||
}),
|
||||
)
|
||||
.attach(gpui::AnchorCorner::BottomLeft)
|
||||
.into_any()
|
||||
}
|
||||
},
|
||||
{
|
||||
let fs = self.fs.clone();
|
||||
let model = available_model.clone();
|
||||
move |cx| {
|
||||
let model = model.clone();
|
||||
update_settings_file::<AssistantSettings>(
|
||||
fs.clone(),
|
||||
cx,
|
||||
move |settings, _| settings.set_model(model),
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
menu
|
||||
})
|
||||
.into()
|
||||
})
|
||||
.trigger(self.trigger)
|
||||
.attach(gpui::AnchorCorner::BottomLeft)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
use crate::{
|
||||
slash_command::SlashCommandCompletionProvider, AssistantPanel, CompletionProvider,
|
||||
InlineAssist, InlineAssistant, LanguageModelRequest, LanguageModelRequestMessage, Role,
|
||||
slash_command::SlashCommandCompletionProvider, AssistantPanel, InlineAssist, InlineAssistant,
|
||||
LanguageModelCompletionProvider,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use assets::Assets;
|
||||
use chrono::{DateTime, Utc};
|
||||
use collections::{HashMap, HashSet};
|
||||
use editor::{actions::Tab, CurrentLineHighlight, Editor, EditorElement, EditorEvent, EditorStyle};
|
||||
@@ -12,12 +13,13 @@ use futures::{
|
||||
};
|
||||
use fuzzy::StringMatchCandidate;
|
||||
use gpui::{
|
||||
actions, point, size, transparent_black, AppContext, BackgroundExecutor, Bounds, EventEmitter,
|
||||
Global, HighlightStyle, PromptLevel, ReadGlobal, Subscription, Task, TextStyle,
|
||||
actions, point, size, transparent_black, AppContext, AssetSource, BackgroundExecutor, Bounds,
|
||||
EventEmitter, Global, HighlightStyle, PromptLevel, ReadGlobal, Subscription, Task, TextStyle,
|
||||
TitlebarOptions, UpdateGlobal, View, WindowBounds, WindowHandle, WindowOptions,
|
||||
};
|
||||
use heed::{types::SerdeBincode, Database, RoTxn};
|
||||
use language::{language_settings::SoftWrap, Buffer, LanguageRegistry};
|
||||
use language_model::{LanguageModelRequest, LanguageModelRequestMessage, Role};
|
||||
use parking_lot::RwLock;
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use rope::Rope;
|
||||
@@ -627,17 +629,18 @@ impl PromptLibrary {
|
||||
self.picker.update(cx, |picker, cx| picker.focus(cx));
|
||||
}
|
||||
|
||||
pub fn inline_assist(&mut self, _: &InlineAssist, cx: &mut ViewContext<Self>) {
|
||||
pub fn inline_assist(&mut self, action: &InlineAssist, cx: &mut ViewContext<Self>) {
|
||||
let Some(active_prompt_id) = self.active_prompt_id else {
|
||||
cx.propagate();
|
||||
return;
|
||||
};
|
||||
|
||||
let prompt_editor = &self.prompt_editors[&active_prompt_id].body_editor;
|
||||
let provider = CompletionProvider::global(cx);
|
||||
if provider.is_authenticated() {
|
||||
let provider = LanguageModelCompletionProvider::read_global(cx);
|
||||
let initial_prompt = action.prompt.clone();
|
||||
if provider.is_authenticated(cx) {
|
||||
InlineAssistant::update_global(cx, |assistant, cx| {
|
||||
assistant.assist(&prompt_editor, None, None, cx)
|
||||
assistant.assist(&prompt_editor, None, None, initial_prompt, cx)
|
||||
})
|
||||
} else {
|
||||
for window in cx.windows() {
|
||||
@@ -731,29 +734,29 @@ impl PromptLibrary {
|
||||
const DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1);
|
||||
|
||||
cx.background_executor().timer(DEBOUNCE_TIMEOUT).await;
|
||||
let token_count = cx
|
||||
.update(|cx| {
|
||||
let provider = CompletionProvider::global(cx);
|
||||
let model = provider.model();
|
||||
provider.count_tokens(
|
||||
LanguageModelRequest {
|
||||
model,
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::System,
|
||||
content: body.to_string(),
|
||||
}],
|
||||
stop: Vec::new(),
|
||||
temperature: 1.,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let prompt_editor = this.prompt_editors.get_mut(&prompt_id).unwrap();
|
||||
prompt_editor.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
if let Some(token_count) = cx.update(|cx| {
|
||||
LanguageModelCompletionProvider::read_global(cx).count_tokens(
|
||||
LanguageModelRequest {
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::System,
|
||||
content: body.to_string(),
|
||||
}],
|
||||
stop: Vec::new(),
|
||||
temperature: 1.,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})? {
|
||||
let token_count = token_count.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let prompt_editor = this.prompt_editors.get_mut(&prompt_id).unwrap();
|
||||
prompt_editor.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
.log_err()
|
||||
});
|
||||
@@ -803,7 +806,7 @@ impl PromptLibrary {
|
||||
let prompt_metadata = self.store.metadata(prompt_id)?;
|
||||
let prompt_editor = &self.prompt_editors[&prompt_id];
|
||||
let focus_handle = prompt_editor.body_editor.focus_handle(cx);
|
||||
let current_model = CompletionProvider::global(cx).model();
|
||||
let current_model = LanguageModelCompletionProvider::read_global(cx).active_model();
|
||||
let settings = ThemeSettings::get_global(cx);
|
||||
|
||||
Some(
|
||||
@@ -914,7 +917,11 @@ impl PromptLibrary {
|
||||
format!(
|
||||
"Model: {}",
|
||||
current_model
|
||||
.display_name()
|
||||
.as_ref()
|
||||
.map(|model| model
|
||||
.name()
|
||||
.0)
|
||||
.unwrap_or_default()
|
||||
),
|
||||
cx,
|
||||
)
|
||||
@@ -1296,6 +1303,17 @@ impl PromptStore {
|
||||
fn first(&self) -> Option<PromptMetadata> {
|
||||
self.metadata_cache.read().metadata.first().cloned()
|
||||
}
|
||||
|
||||
pub fn operations_prompt(&self) -> String {
|
||||
String::from_utf8(
|
||||
Assets
|
||||
.load("prompts/operations.md")
|
||||
.unwrap()
|
||||
.unwrap()
|
||||
.to_vec(),
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
/// Wraps a shared future to a prompt store so it can be assigned as a context global.
|
||||
|
||||
@@ -1,171 +0,0 @@
|
||||
use language::Rope;
|
||||
use std::ops::Range;
|
||||
|
||||
/// Search the given buffer for the given substring, ignoring any differences
|
||||
/// in line indentation between the query and the buffer.
|
||||
///
|
||||
/// Returns the range of byte offsets in the buffer corresponding
/// to the entire lines of the best match, if any.
|
||||
pub fn fuzzy_search_lines(haystack: &Rope, needle: &str) -> Option<Range<usize>> {
|
||||
const SIMILARITY_THRESHOLD: f64 = 0.8;
|
||||
|
||||
let mut best_match: Option<(Range<usize>, f64)> = None; // (range, score)
|
||||
let mut haystack_lines = haystack.chunks().lines();
|
||||
let mut haystack_line_start = 0;
|
||||
while let Some(mut haystack_line) = haystack_lines.next() {
|
||||
let next_haystack_line_start = haystack_line_start + haystack_line.len() + 1;
|
||||
let mut advanced_to_next_haystack_line = false;
|
||||
|
||||
let mut matched = true;
|
||||
let match_start = haystack_line_start;
|
||||
let mut match_end = next_haystack_line_start;
|
||||
let mut match_score = 0.0;
|
||||
let mut needle_lines = needle.lines().peekable();
|
||||
while let Some(needle_line) = needle_lines.next() {
|
||||
let similarity = line_similarity(haystack_line, needle_line);
|
||||
if similarity >= SIMILARITY_THRESHOLD {
|
||||
match_end = haystack_lines.offset();
|
||||
match_score += similarity;
|
||||
|
||||
if needle_lines.peek().is_some() {
|
||||
if let Some(next_haystack_line) = haystack_lines.next() {
|
||||
advanced_to_next_haystack_line = true;
|
||||
haystack_line = next_haystack_line;
|
||||
} else {
|
||||
matched = false;
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
matched = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if matched
|
||||
&& best_match
|
||||
.as_ref()
|
||||
.map(|(_, best_score)| match_score > *best_score)
|
||||
.unwrap_or(true)
|
||||
{
|
||||
best_match = Some((match_start..match_end, match_score));
|
||||
}
|
||||
|
||||
if advanced_to_next_haystack_line {
|
||||
haystack_lines.seek(next_haystack_line_start);
|
||||
}
|
||||
haystack_line_start = next_haystack_line_start;
|
||||
}
|
||||
|
||||
best_match.map(|(range, _)| range)
|
||||
}
|
||||
|
||||
/// Calculates the similarity between two lines, ignoring leading and trailing whitespace,
|
||||
/// using the Jaro-Winkler distance.
|
||||
///
|
||||
/// Returns a value between 0.0 and 1.0, where 1.0 indicates an exact match.
|
||||
fn line_similarity(line1: &str, line2: &str) -> f64 {
|
||||
strsim::jaro_winkler(line1.trim(), line2.trim())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use gpui::{AppContext, Context as _};
|
||||
use language::Buffer;
|
||||
use unindent::Unindent as _;
|
||||
use util::test::marked_text_ranges;
|
||||
|
||||
#[gpui::test]
|
||||
fn test_fuzzy_search_lines(cx: &mut AppContext) {
|
||||
let (text, expected_ranges) = marked_text_ranges(
|
||||
&r#"
|
||||
fn main() {
|
||||
if a() {
|
||||
assert_eq!(
|
||||
1 + 2,
|
||||
does_not_match,
|
||||
);
|
||||
}
|
||||
|
||||
println!("hi");
|
||||
|
||||
assert_eq!(
|
||||
1 + 2,
|
||||
3,
|
||||
); // this last line does not match
|
||||
|
||||
« assert_eq!(
|
||||
1 + 2,
|
||||
3,
|
||||
);
|
||||
»
|
||||
|
||||
« assert_eq!(
|
||||
"something",
|
||||
"else",
|
||||
);
|
||||
»
|
||||
}
|
||||
"#
|
||||
.unindent(),
|
||||
false,
|
||||
);
|
||||
|
||||
let buffer = cx.new_model(|cx| Buffer::local(&text, cx));
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
|
||||
|
||||
let actual_range = fuzzy_search_lines(
|
||||
snapshot.as_rope(),
|
||||
&"
|
||||
assert_eq!(
|
||||
1 + 2,
|
||||
3,
|
||||
);
|
||||
"
|
||||
.unindent(),
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(actual_range, expected_ranges[0]);
|
||||
|
||||
let actual_range = fuzzy_search_lines(
|
||||
snapshot.as_rope(),
|
||||
&"
|
||||
assert_eq!(
|
||||
1 + 2,
|
||||
3,
|
||||
);
|
||||
"
|
||||
.unindent(),
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(actual_range, expected_ranges[0]);
|
||||
|
||||
let actual_range = fuzzy_search_lines(
|
||||
snapshot.as_rope(),
|
||||
&"
|
||||
asst_eq!(
|
||||
\"something\",
|
||||
\"els\"
|
||||
)
|
||||
"
|
||||
.unindent(),
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(actual_range, expected_ranges[1]);
|
||||
|
||||
let actual_range = fuzzy_search_lines(
|
||||
snapshot.as_rope(),
|
||||
&"
|
||||
assert_eq!(
|
||||
2 + 1,
|
||||
3,
|
||||
);
|
||||
"
|
||||
.unindent(),
|
||||
);
|
||||
assert_eq!(actual_range, None);
|
||||
}
|
||||
}
|
||||
@@ -33,7 +33,7 @@ impl DiagnosticsSlashCommand {
|
||||
if query.is_empty() {
|
||||
let workspace = workspace.read(cx);
|
||||
let entries = workspace.recent_navigation_history(Some(10), cx);
|
||||
let path_prefix: Arc<str> = "".into();
|
||||
let path_prefix: Arc<str> = Arc::default();
|
||||
Task::ready(
|
||||
entries
|
||||
.into_iter()
|
||||
@@ -284,7 +284,7 @@ fn collect_diagnostics(
|
||||
PathBuf::try_from(path)
|
||||
.ok()
|
||||
.and_then(|path| {
|
||||
project.read(cx).worktrees().find_map(|worktree| {
|
||||
project.read(cx).worktrees(cx).find_map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
let worktree_root_path = Path::new(worktree.root_name());
|
||||
let relative_path = path.strip_prefix(worktree_root_path).ok()?;
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
use std::path::Path;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::{anyhow, bail, Result};
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
};
|
||||
use gpui::{AppContext, Model, Task, WeakView};
|
||||
use gpui::{AppContext, BackgroundExecutor, Model, Task, WeakView};
|
||||
use indexed_docs::{
|
||||
DocsDotRsProvider, IndexedDocsRegistry, IndexedDocsStore, LocalRustdocProvider, PackageName,
|
||||
ProviderId,
|
||||
@@ -23,7 +24,7 @@ impl DocsSlashCommand {
|
||||
pub const NAME: &'static str = "docs";
|
||||
|
||||
fn path_to_cargo_toml(project: Model<Project>, cx: &mut AppContext) -> Option<Arc<Path>> {
|
||||
let worktree = project.read(cx).worktrees().next()?;
|
||||
let worktree = project.read(cx).worktrees(cx).next()?;
|
||||
let worktree = worktree.read(cx);
|
||||
let entry = worktree.entry_for_path("Cargo.toml")?;
|
||||
let path = ProjectPath {
|
||||
@@ -90,6 +91,55 @@ impl DocsSlashCommand {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Runs just-in-time indexing for a given package, in case the slash command
|
||||
/// is run without any entries existing in the index.
|
||||
fn run_just_in_time_indexing(
|
||||
store: Arc<IndexedDocsStore>,
|
||||
key: String,
|
||||
package: PackageName,
|
||||
executor: BackgroundExecutor,
|
||||
) -> Task<()> {
|
||||
executor.clone().spawn(async move {
|
||||
let (prefix, needs_full_index) = if let Some((prefix, _)) = key.split_once('*') {
|
||||
// If we have a wildcard in the search, we want to wait until
|
||||
// we've completely finished indexing so we get a full set of
|
||||
// results for the wildcard.
|
||||
(prefix.to_string(), true)
|
||||
} else {
|
||||
(key, false)
|
||||
};
|
||||
|
||||
// If we already have some entries, we assume that we've indexed the package before
|
||||
// and don't need to do it again.
|
||||
let has_any_entries = store
|
||||
.any_with_prefix(prefix.clone())
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
if has_any_entries {
|
||||
return ();
|
||||
};
|
||||
|
||||
let index_task = store.clone().index(package.clone());
|
||||
|
||||
if needs_full_index {
|
||||
_ = index_task.await;
|
||||
} else {
|
||||
loop {
|
||||
executor.timer(Duration::from_millis(200)).await;
|
||||
|
||||
if store
|
||||
.any_with_prefix(prefix.clone())
|
||||
.await
|
||||
.unwrap_or_default()
|
||||
|| !store.is_indexing(&package)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl SlashCommand for DocsSlashCommand {
|
||||
@@ -169,7 +219,7 @@ impl SlashCommand for DocsSlashCommand {
|
||||
if index {
|
||||
// We don't need to hold onto this task, as the `IndexedDocsStore` will hold it
|
||||
// until it completes.
|
||||
let _ = store.clone().index(package.as_str().into());
|
||||
drop(store.clone().index(package.as_str().into()));
|
||||
}
|
||||
|
||||
let items = store.search(package).await;
|
||||
@@ -200,13 +250,14 @@ impl SlashCommand for DocsSlashCommand {
|
||||
};
|
||||
|
||||
let args = DocsSlashCommandArgs::parse(argument);
|
||||
let executor = cx.background_executor().clone();
|
||||
let task = cx.background_executor().spawn({
|
||||
let store = args
|
||||
.provider()
|
||||
.ok_or_else(|| anyhow!("no docs provider specified"))
|
||||
.and_then(|provider| IndexedDocsStore::try_global(provider, cx));
|
||||
async move {
|
||||
let (provider, key) = match args {
|
||||
let (provider, key) = match args.clone() {
|
||||
DocsSlashCommandArgs::NoProvider => bail!("no docs provider specified"),
|
||||
DocsSlashCommandArgs::SearchPackageDocs {
|
||||
provider, package, ..
|
||||
@@ -219,6 +270,12 @@ impl SlashCommand for DocsSlashCommand {
|
||||
};
|
||||
|
||||
let store = store?;
|
||||
|
||||
if let Some(package) = args.package() {
|
||||
Self::run_just_in_time_indexing(store.clone(), key.clone(), package, executor)
|
||||
.await;
|
||||
}
|
||||
|
||||
let (text, ranges) = if let Some((prefix, _)) = key.split_once('*') {
|
||||
let docs = store.load_many_by_prefix(prefix.to_string()).await?;
|
||||
|
||||
@@ -269,7 +326,7 @@ fn is_item_path_delimiter(char: char) -> bool {
|
||||
!char.is_alphanumeric() && char != '-' && char != '_'
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub(crate) enum DocsSlashCommandArgs {
|
||||
NoProvider,
|
||||
SearchPackageDocs {
|
||||
|
||||
@@ -10,7 +10,7 @@ use assistant_slash_command::{
|
||||
use futures::AsyncReadExt;
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use html_to_markdown::{convert_html_to_markdown, markdown, TagHandler};
|
||||
use http::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use language::LspAdapterDelegate;
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -29,7 +29,7 @@ impl FileSlashCommand {
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().read(cx);
|
||||
let entries = workspace.recent_navigation_history(Some(10), cx);
|
||||
let path_prefix: Arc<str> = "".into();
|
||||
let path_prefix: Arc<str> = Arc::default();
|
||||
Task::ready(
|
||||
entries
|
||||
.into_iter()
|
||||
@@ -188,7 +188,7 @@ fn collect_files(
|
||||
let project_handle = project.downgrade();
|
||||
let snapshots = project
|
||||
.read(cx)
|
||||
.worktrees()
|
||||
.worktrees(cx)
|
||||
.map(|worktree| worktree.read(cx).snapshot())
|
||||
.collect::<Vec<_>>();
|
||||
cx.spawn(|mut cx| async move {
|
||||
|
||||
@@ -75,7 +75,7 @@ impl ProjectSlashCommand {
|
||||
}
|
||||
|
||||
fn path_to_cargo_toml(project: Model<Project>, cx: &mut AppContext) -> Option<Arc<Path>> {
|
||||
let worktree = project.read(cx).worktrees().next()?;
|
||||
let worktree = project.read(cx).worktrees(cx).next()?;
|
||||
let worktree = worktree.read(cx);
|
||||
let entry = worktree.entry_for_path("Cargo.toml")?;
|
||||
let path = ProjectPath {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use crate::{
|
||||
assistant_settings::AssistantSettings, humanize_token_count,
|
||||
prompts::generate_terminal_assistant_prompt, AssistantPanel, AssistantPanelEvent,
|
||||
CompletionProvider, LanguageModelRequest, LanguageModelRequestMessage, Role,
|
||||
humanize_token_count, prompts::generate_terminal_assistant_prompt, AssistantPanel,
|
||||
AssistantPanelEvent, LanguageModelCompletionProvider, ModelSelector,
|
||||
};
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::telemetry::Telemetry;
|
||||
@@ -13,11 +12,12 @@ use editor::{
|
||||
use fs::Fs;
|
||||
use futures::{channel::mpsc, SinkExt, StreamExt};
|
||||
use gpui::{
|
||||
AppContext, Context, EventEmitter, FocusHandle, FocusableView, FontStyle, FontWeight, Global,
|
||||
Model, ModelContext, Subscription, Task, TextStyle, UpdateGlobal, View, WeakView, WhiteSpace,
|
||||
AppContext, Context, EventEmitter, FocusHandle, FocusableView, Global, Model, ModelContext,
|
||||
Subscription, Task, TextStyle, UpdateGlobal, View, WeakView,
|
||||
};
|
||||
use language::Buffer;
|
||||
use settings::{update_settings_file, Settings};
|
||||
use language_model::{LanguageModelRequest, LanguageModelRequestMessage, Role};
|
||||
use settings::Settings;
|
||||
use std::{
|
||||
cmp,
|
||||
sync::Arc,
|
||||
@@ -26,7 +26,7 @@ use std::{
|
||||
use terminal::Terminal;
|
||||
use terminal_view::TerminalView;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};
|
||||
use ui::{prelude::*, IconButtonShape, Tooltip};
|
||||
use util::ResultExt;
|
||||
use workspace::{notifications::NotificationId, Toast, Workspace};
|
||||
|
||||
@@ -73,11 +73,13 @@ impl TerminalInlineAssistant {
|
||||
terminal_view: &View<TerminalView>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
assistant_panel: Option<&View<AssistantPanel>>,
|
||||
initial_prompt: Option<String>,
|
||||
cx: &mut WindowContext,
|
||||
) {
|
||||
let terminal = terminal_view.read(cx).terminal().clone();
|
||||
let assist_id = self.next_assist_id.post_inc();
|
||||
let prompt_buffer = cx.new_model(|cx| Buffer::local("", cx));
|
||||
let prompt_buffer =
|
||||
cx.new_model(|cx| Buffer::local(initial_prompt.unwrap_or_default(), cx));
|
||||
let prompt_buffer = cx.new_model(|cx| MultiBuffer::singleton(prompt_buffer, cx));
|
||||
let codegen = cx.new_model(|_| Codegen::new(terminal, self.telemetry.clone()));
|
||||
|
||||
@@ -212,8 +214,6 @@ impl TerminalInlineAssistant {
|
||||
) -> Result<LanguageModelRequest> {
|
||||
let assist = self.assists.get(&assist_id).context("invalid assist")?;
|
||||
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
|
||||
let shell = std::env::var("SHELL").ok();
|
||||
let working_directory = assist
|
||||
.terminal
|
||||
@@ -265,7 +265,6 @@ impl TerminalInlineAssistant {
|
||||
});
|
||||
|
||||
Ok(LanguageModelRequest {
|
||||
model,
|
||||
messages,
|
||||
stop: Vec::new(),
|
||||
temperature: 1.0,
|
||||
@@ -448,22 +447,19 @@ impl EventEmitter<PromptEditorEvent> for PromptEditor {}
|
||||
|
||||
impl Render for PromptEditor {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let fs = self.fs.clone();
|
||||
|
||||
let buttons = match &self.codegen.read(cx).status {
|
||||
CodegenStatus::Idle => {
|
||||
vec![
|
||||
IconButton::new("cancel", IconName::Close)
|
||||
.icon_color(Color::Muted)
|
||||
.size(ButtonSize::None)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx))
|
||||
.on_click(
|
||||
cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)),
|
||||
),
|
||||
IconButton::new("start", IconName::Sparkle)
|
||||
IconButton::new("start", IconName::SparkleAlt)
|
||||
.icon_color(Color::Muted)
|
||||
.size(ButtonSize::None)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| Tooltip::for_action("Generate", &menu::Confirm, cx))
|
||||
.on_click(
|
||||
cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StartRequested)),
|
||||
@@ -474,15 +470,14 @@ impl Render for PromptEditor {
|
||||
vec![
|
||||
IconButton::new("cancel", IconName::Close)
|
||||
.icon_color(Color::Muted)
|
||||
.size(ButtonSize::None)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| Tooltip::text("Cancel Assist", cx))
|
||||
.on_click(
|
||||
cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)),
|
||||
),
|
||||
IconButton::new("stop", IconName::Stop)
|
||||
.icon_color(Color::Error)
|
||||
.size(ButtonSize::None)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| {
|
||||
Tooltip::with_meta(
|
||||
"Interrupt Generation",
|
||||
@@ -500,7 +495,7 @@ impl Render for PromptEditor {
|
||||
vec![
|
||||
IconButton::new("cancel", IconName::Close)
|
||||
.icon_color(Color::Muted)
|
||||
.size(ButtonSize::None)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx))
|
||||
.on_click(
|
||||
cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)),
|
||||
@@ -508,8 +503,7 @@ impl Render for PromptEditor {
|
||||
if self.edited_since_done {
|
||||
IconButton::new("restart", IconName::RotateCw)
|
||||
.icon_color(Color::Info)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.size(ButtonSize::None)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| {
|
||||
Tooltip::with_meta(
|
||||
"Restart Generation",
|
||||
@@ -524,7 +518,7 @@ impl Render for PromptEditor {
|
||||
} else {
|
||||
IconButton::new("confirm", IconName::Play)
|
||||
.icon_color(Color::Info)
|
||||
.size(ButtonSize::None)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|cx| {
|
||||
Tooltip::for_action("Execute generated command", &menu::Confirm, cx)
|
||||
})
|
||||
@@ -552,59 +546,27 @@ impl Render for PromptEditor {
|
||||
.w_12()
|
||||
.justify_center()
|
||||
.gap_2()
|
||||
.child(
|
||||
PopoverMenu::new("model-switcher")
|
||||
.menu(move |cx| {
|
||||
ContextMenu::build(cx, |mut menu, cx| {
|
||||
for model in CompletionProvider::global(cx).available_models(cx)
|
||||
{
|
||||
menu = menu.custom_entry(
|
||||
{
|
||||
let model = model.clone();
|
||||
move |_| {
|
||||
Label::new(model.display_name())
|
||||
.into_any_element()
|
||||
}
|
||||
},
|
||||
{
|
||||
let fs = fs.clone();
|
||||
let model = model.clone();
|
||||
move |cx| {
|
||||
let model = model.clone();
|
||||
update_settings_file::<AssistantSettings>(
|
||||
fs.clone(),
|
||||
cx,
|
||||
move |settings| settings.set_model(model),
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
menu
|
||||
})
|
||||
.into()
|
||||
})
|
||||
.trigger(
|
||||
IconButton::new("context", IconName::Settings)
|
||||
.size(ButtonSize::None)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
format!(
|
||||
"Using {}",
|
||||
CompletionProvider::global(cx)
|
||||
.model()
|
||||
.display_name()
|
||||
),
|
||||
None,
|
||||
"Click to Change Model",
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
)
|
||||
.anchor(gpui::AnchorCorner::BottomRight),
|
||||
)
|
||||
.child(ModelSelector::new(
|
||||
self.fs.clone(),
|
||||
IconButton::new("context", IconName::Settings)
|
||||
.shape(IconButtonShape::Square)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
format!(
|
||||
"Using {}",
|
||||
LanguageModelCompletionProvider::read_global(cx)
|
||||
.active_model()
|
||||
.map(|model| model.name().0)
|
||||
.unwrap_or_else(|| "No model selected".into()),
|
||||
),
|
||||
None,
|
||||
"Change Model",
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
))
|
||||
.children(
|
||||
if let CodegenStatus::Error(error) = &self.codegen.read(cx).status {
|
||||
let error_message = SharedString::from(error.to_string());
|
||||
@@ -745,13 +707,18 @@ impl PromptEditor {
|
||||
inline_assistant.request_for_inline_assist(assist_id, cx)
|
||||
})??;
|
||||
|
||||
let token_count = cx
|
||||
.update(|cx| CompletionProvider::global(cx).count_tokens(request, cx))?
|
||||
.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
if let Some(token_count) = cx.update(|cx| {
|
||||
LanguageModelCompletionProvider::read_global(cx).count_tokens(request, cx)
|
||||
})? {
|
||||
let token_count = token_count.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -876,7 +843,7 @@ impl PromptEditor {
|
||||
}
|
||||
|
||||
fn render_token_count(&self, cx: &mut ViewContext<Self>) -> Option<impl IntoElement> {
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
let model = LanguageModelCompletionProvider::read_global(cx).active_model()?;
|
||||
let token_count = self.token_count?;
|
||||
let max_token_count = model.max_token_count();
|
||||
|
||||
@@ -942,14 +909,11 @@ impl PromptEditor {
|
||||
},
|
||||
font_family: settings.ui_font.family.clone(),
|
||||
font_features: settings.ui_font.features.clone(),
|
||||
font_fallbacks: settings.ui_font.fallbacks.clone(),
|
||||
font_size: rems(0.875).into(),
|
||||
font_weight: FontWeight::NORMAL,
|
||||
font_style: FontStyle::Normal,
|
||||
font_weight: settings.ui_font.weight,
|
||||
line_height: relative(1.3),
|
||||
background_color: None,
|
||||
underline: None,
|
||||
strikethrough: None,
|
||||
white_space: WhiteSpace::Normal,
|
||||
..Default::default()
|
||||
};
|
||||
EditorElement::new(
|
||||
&self.editor,
|
||||
@@ -983,7 +947,7 @@ impl TerminalTransaction {
|
||||
}
|
||||
|
||||
pub fn push(&mut self, hunk: String, cx: &mut AppContext) {
|
||||
// Ensure that the assistant cannot accidently execute commands that are streamed into the terminal
|
||||
// Ensure that the assistant cannot accidentally execute commands that are streamed into the terminal
|
||||
let input = hunk.replace(CARRIAGE_RETURN, " ");
|
||||
self.terminal
|
||||
.update(cx, |terminal, _| terminal.input(input));
|
||||
@@ -1025,8 +989,12 @@ impl Codegen {
|
||||
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
|
||||
|
||||
let telemetry = self.telemetry.clone();
|
||||
let model_telemetry_id = prompt.model.telemetry_id();
|
||||
let response = CompletionProvider::global(cx).complete(prompt, cx);
|
||||
let model_telemetry_id = LanguageModelCompletionProvider::read_global(cx)
|
||||
.active_model()
|
||||
.map(|m| m.telemetry_id())
|
||||
.unwrap_or_default();
|
||||
let response =
|
||||
LanguageModelCompletionProvider::read_global(cx).stream_completion(prompt, cx);
|
||||
|
||||
self.generation = cx.spawn(|this, mut cx| async move {
|
||||
let response = response.await;
|
||||
@@ -1037,8 +1005,8 @@ impl Codegen {
|
||||
let mut response_latency = None;
|
||||
let request_start = Instant::now();
|
||||
let task = async {
|
||||
let mut response = response.inner.await?;
|
||||
while let Some(chunk) = response.next().await {
|
||||
let mut chunks = response?;
|
||||
while let Some(chunk) = chunks.next().await {
|
||||
if response_latency.is_none() {
|
||||
response_latency = Some(request_start.elapsed());
|
||||
}
|
||||
|
||||
@@ -1,85 +0,0 @@
|
||||
# Assistant Tooling
|
||||
|
||||
Bringing Language Model tool calling to GPUI.
|
||||
|
||||
This unlocks:
|
||||
|
||||
- **Structured Extraction** of model responses
|
||||
- **Validation** of model inputs
|
||||
- **Execution** of chosen tools
|
||||
|
||||
## Overview
|
||||
|
||||
Language Models can produce structured outputs that are perfect for calling functions. The most famous of these is OpenAI's tool calling. When making a chat completion you can pass a list of tools available to the model. The model will choose `0..n` tools to help it complete a user's task. It's up to _you_ to create the tools that the model can call.
|
||||
|
||||
> **User**: "Hey I need help with implementing a collapsible panel in GPUI"
|
||||
>
|
||||
> **Assistant**: "Sure, I can help with that. Let me see what I can find."
|
||||
>
|
||||
> `tool_calls: ["name": "query_codebase", arguments: "{ 'query': 'GPUI collapsible panel' }"]`
|
||||
>
|
||||
> `result: "['crates/gpui/src/panel.rs:12: impl Panel { ... }', 'crates/gpui/src/panel.rs:20: impl Panel { ... }']"`
|
||||
>
|
||||
> **Assistant**: "Here are some excerpts from the GPUI codebase that might help you."
|
||||
|
||||
This library is designed to facilitate this interaction mode by allowing you to go from `struct` to `tool` with two simple traits, `LanguageModelTool` and `ToolView`.
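
Both traits are defined in `tool_registry.rs` (shown further down in this change). As a rough sketch only, a tool like the `WeatherTool` used below might be wired up roughly as follows; the import path, the `WeatherQuery`/`WeatherReport` types, and the hard-coded temperature are invented for illustration and are not part of the crate:

```rust
// Illustrative sketch only: the import path and the types below are assumptions.
use anyhow::Result;
use assistant_tooling::{LanguageModelTool, ProjectContext, ToolView};
use gpui::{Render, Task, View, WindowContext};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use ui::{prelude::*, ViewContext};

#[derive(Deserialize, JsonSchema)]
struct WeatherQuery {
    city: String,
}

#[derive(Clone, Default, Serialize, Deserialize)]
struct WeatherReport {
    city: String,
    temperature_c: Option<f32>,
}

struct WeatherTool; // the real tool would hold its `api_client`

struct WeatherToolView {
    report: WeatherReport,
}

impl Render for WeatherToolView {
    fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
        Label::new(format!(
            "{}: {:?} °C",
            self.report.city, self.report.temperature_c
        ))
    }
}

impl ToolView for WeatherToolView {
    type Input = WeatherQuery;
    type SerializedState = WeatherReport;

    fn generate(&self, _project: &mut ProjectContext, _cx: &mut ViewContext<Self>) -> String {
        // This is the text that gets sent back to the model as the tool result.
        format!(
            "Weather in {}: {:?} °C",
            self.report.city, self.report.temperature_c
        )
    }

    fn set_input(&mut self, input: Self::Input, cx: &mut ViewContext<Self>) {
        self.report.city = input.city;
        cx.notify();
    }

    fn execute(&mut self, cx: &mut ViewContext<Self>) -> Task<Result<()>> {
        // A real tool would kick off an async request against its API client here.
        self.report.temperature_c = Some(21.0);
        cx.notify();
        Task::ready(Ok(()))
    }

    fn serialize(&self, _cx: &mut ViewContext<Self>) -> Self::SerializedState {
        self.report.clone()
    }

    fn deserialize(&mut self, report: Self::SerializedState, _cx: &mut ViewContext<Self>) -> Result<()> {
        self.report = report;
        Ok(())
    }
}

impl LanguageModelTool for WeatherTool {
    type View = WeatherToolView;

    fn name(&self) -> String {
        "get_weather".into()
    }

    fn description(&self) -> String {
        "Returns the current weather for a city.".into()
    }

    fn view(&self, cx: &mut WindowContext) -> View<Self::View> {
        cx.new_view(|_cx| WeatherToolView {
            report: WeatherReport::default(),
        })
    }
}
```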
|
||||
|
||||
## Using the Tool Registry
|
||||
|
||||
```rust
|
||||
let mut tool_registry = ToolRegistry::new();
|
||||
tool_registry
|
||||
.register(WeatherTool { api_client })
|
||||
.unwrap(); // You can only register one tool per name
|
||||
|
||||
let completion = cx.update(|cx| {
|
||||
CompletionProvider::get(cx).complete(
|
||||
model_name,
|
||||
messages,
|
||||
Vec::new(),
|
||||
1.0,
|
||||
// The definitions get passed directly to OpenAI when you want
|
||||
// the model to be able to call your tool
|
||||
tool_registry.definitions(),
|
||||
)
|
||||
});
|
||||
|
||||
let mut stream = completion?.await?;
|
||||
|
||||
let mut message = AssistantMessage::new();
|
||||
|
||||
while let Some(delta) = stream.next().await {
|
||||
// As messages stream in, you'll get both assistant content
|
||||
if let Some(content) = &delta.content {
|
||||
message
|
||||
.body
|
||||
.update(cx, |message, cx| message.append(&content, cx));
|
||||
}
|
||||
|
||||
// And tool calls!
|
||||
for tool_call_delta in delta.tool_calls {
|
||||
let index = tool_call_delta.index as usize;
|
||||
if index >= message.tool_calls.len() {
|
||||
message.tool_calls.resize_with(index + 1, Default::default);
|
||||
}
|
||||
let tool_call = &mut message.tool_calls[index];
|
||||
|
||||
// Build up an ID
|
||||
if let Some(id) = &tool_call_delta.id {
|
||||
tool_call.id.push_str(id);
|
||||
}
|
||||
|
||||
tool_registry.update_tool_call(
|
||||
tool_call,
|
||||
tool_call_delta.name.as_deref(),
|
||||
tool_call_delta.arguments.as_deref(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Once the stream of tokens is complete, you can execute the tool call by calling `tool_registry.execute_tool_call(tool_call, cx)`, which returns a `Task<Result<()>>`.
|
||||
|
||||
As the tokens stream in and tool calls are executed, your `ToolView` will get updates. Render each tool call by passing that `tool_call` into `tool_registry.render_tool_call(tool_call, cx)`. The final message for the model can be pulled by calling `self.tool_registry.content_for_tool_call(tool_call, &mut project_context, cx)`.
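
Putting those pieces together, the post-stream handling might look roughly like this; `message`, `project`, `fs`, and `cx` are assumed to come from the surrounding assistant code, and the exact argument types may differ:

```rust
// Sketch only: `message`, `project`, `fs`, and `cx` come from the caller.
for tool_call in &mut message.tool_calls {
    // `execute_tool_call` returns a `Task<Result<()>>`; detach or await it.
    let _execution = tool_registry.execute_tool_call(tool_call, cx);
}

// Each call can be rendered with `tool_registry.render_tool_call(tool_call, cx)`.

// When assembling the next request for the model, collect each tool's result:
let mut project_context = ProjectContext::new(project.downgrade(), fs.clone());
let mut tool_results = Vec::new();
for tool_call in &message.tool_calls {
    tool_results.push(tool_registry.content_for_tool_call(
        tool_call,
        &mut project_context,
        cx,
    ));
}
```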
|
||||
@@ -1,13 +0,0 @@
|
||||
mod attachment_registry;
|
||||
mod project_context;
|
||||
mod tool_registry;
|
||||
|
||||
pub use attachment_registry::{
|
||||
AttachmentOutput, AttachmentRegistry, LanguageModelAttachment, SavedUserAttachment,
|
||||
UserAttachment,
|
||||
};
|
||||
pub use project_context::ProjectContext;
|
||||
pub use tool_registry::{
|
||||
LanguageModelTool, SavedToolFunctionCall, ToolFunctionCall, ToolFunctionDefinition,
|
||||
ToolRegistry, ToolView,
|
||||
};
|
||||
@@ -1,234 +0,0 @@
|
||||
use crate::ProjectContext;
|
||||
use anyhow::{anyhow, Result};
|
||||
use collections::HashMap;
|
||||
use futures::future::join_all;
|
||||
use gpui::{AnyView, Render, Task, View, WindowContext};
|
||||
use serde::{de::DeserializeOwned, Deserialize, Serialize};
|
||||
use serde_json::value::RawValue;
|
||||
use std::{
|
||||
any::TypeId,
|
||||
sync::{
|
||||
atomic::{AtomicBool, Ordering::SeqCst},
|
||||
Arc,
|
||||
},
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
|
||||
pub struct AttachmentRegistry {
|
||||
registered_attachments: HashMap<TypeId, RegisteredAttachment>,
|
||||
}
|
||||
|
||||
pub trait AttachmentOutput {
|
||||
fn generate(&self, project: &mut ProjectContext, cx: &mut WindowContext) -> String;
|
||||
}
|
||||
|
||||
pub trait LanguageModelAttachment {
|
||||
type Output: DeserializeOwned + Serialize + 'static;
|
||||
type View: Render + AttachmentOutput;
|
||||
|
||||
fn name(&self) -> Arc<str>;
|
||||
fn run(&self, cx: &mut WindowContext) -> Task<Result<Self::Output>>;
|
||||
fn view(&self, output: Result<Self::Output>, cx: &mut WindowContext) -> View<Self::View>;
|
||||
}
|
||||
|
||||
/// A collected attachment from running an attachment tool
|
||||
pub struct UserAttachment {
|
||||
pub view: AnyView,
|
||||
name: Arc<str>,
|
||||
serialized_output: Result<Box<RawValue>, String>,
|
||||
generate_fn: fn(AnyView, &mut ProjectContext, cx: &mut WindowContext) -> String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct SavedUserAttachment {
|
||||
name: Arc<str>,
|
||||
serialized_output: Result<Box<RawValue>, String>,
|
||||
}
|
||||
|
||||
/// Internal representation of an attachment tool to allow us to treat them dynamically
|
||||
struct RegisteredAttachment {
|
||||
name: Arc<str>,
|
||||
enabled: AtomicBool,
|
||||
call: Box<dyn Fn(&mut WindowContext) -> Task<Result<UserAttachment>>>,
|
||||
deserialize: Box<dyn Fn(&SavedUserAttachment, &mut WindowContext) -> Result<UserAttachment>>,
|
||||
}
|
||||
|
||||
impl AttachmentRegistry {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
registered_attachments: HashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register<A: LanguageModelAttachment + 'static>(&mut self, attachment: A) {
|
||||
let attachment = Arc::new(attachment);
|
||||
|
||||
let call = Box::new({
|
||||
let attachment = attachment.clone();
|
||||
move |cx: &mut WindowContext| {
|
||||
let result = attachment.run(cx);
|
||||
let attachment = attachment.clone();
|
||||
cx.spawn(move |mut cx| async move {
|
||||
let result: Result<A::Output> = result.await;
|
||||
let serialized_output =
|
||||
result
|
||||
.as_ref()
|
||||
.map_err(ToString::to_string)
|
||||
.and_then(|output| {
|
||||
Ok(RawValue::from_string(
|
||||
serde_json::to_string(output).map_err(|e| e.to_string())?,
|
||||
)
|
||||
.unwrap())
|
||||
});
|
||||
|
||||
let view = cx.update(|cx| attachment.view(result, cx))?;
|
||||
|
||||
Ok(UserAttachment {
|
||||
name: attachment.name(),
|
||||
view: view.into(),
|
||||
generate_fn: generate::<A>,
|
||||
serialized_output,
|
||||
})
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
let deserialize = Box::new({
|
||||
let attachment = attachment.clone();
|
||||
move |saved_attachment: &SavedUserAttachment, cx: &mut WindowContext| {
|
||||
let serialized_output = saved_attachment.serialized_output.clone();
|
||||
let output = match &serialized_output {
|
||||
Ok(serialized_output) => {
|
||||
Ok(serde_json::from_str::<A::Output>(serialized_output.get())?)
|
||||
}
|
||||
Err(error) => Err(anyhow!("{error}")),
|
||||
};
|
||||
let view = attachment.view(output, cx).into();
|
||||
|
||||
Ok(UserAttachment {
|
||||
name: saved_attachment.name.clone(),
|
||||
view,
|
||||
serialized_output,
|
||||
generate_fn: generate::<A>,
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
self.registered_attachments.insert(
|
||||
TypeId::of::<A>(),
|
||||
RegisteredAttachment {
|
||||
name: attachment.name(),
|
||||
call,
|
||||
deserialize,
|
||||
enabled: AtomicBool::new(true),
|
||||
},
|
||||
);
|
||||
return;
|
||||
|
||||
fn generate<T: LanguageModelAttachment>(
|
||||
view: AnyView,
|
||||
project: &mut ProjectContext,
|
||||
cx: &mut WindowContext,
|
||||
) -> String {
|
||||
view.downcast::<T::View>()
|
||||
.unwrap()
|
||||
.update(cx, |view, cx| T::View::generate(view, project, cx))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_attachment_tool_enabled<A: LanguageModelAttachment + 'static>(
|
||||
&self,
|
||||
is_enabled: bool,
|
||||
) {
|
||||
if let Some(attachment) = self.registered_attachments.get(&TypeId::of::<A>()) {
|
||||
attachment.enabled.store(is_enabled, SeqCst);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_attachment_tool_enabled<A: LanguageModelAttachment + 'static>(&self) -> bool {
|
||||
if let Some(attachment) = self.registered_attachments.get(&TypeId::of::<A>()) {
|
||||
attachment.enabled.load(SeqCst)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn call<A: LanguageModelAttachment + 'static>(
|
||||
&self,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<UserAttachment>> {
|
||||
let Some(attachment) = self.registered_attachments.get(&TypeId::of::<A>()) else {
|
||||
return Task::ready(Err(anyhow!("no attachment tool")));
|
||||
};
|
||||
|
||||
(attachment.call)(cx)
|
||||
}
|
||||
|
||||
pub fn call_all_attachment_tools(
|
||||
self: Arc<Self>,
|
||||
cx: &mut WindowContext<'_>,
|
||||
) -> Task<Result<Vec<UserAttachment>>> {
|
||||
let this = self.clone();
|
||||
cx.spawn(|mut cx| async move {
|
||||
let attachment_tasks = cx.update(|cx| {
|
||||
let mut tasks = Vec::new();
|
||||
for attachment in this
|
||||
.registered_attachments
|
||||
.values()
|
||||
.filter(|attachment| attachment.enabled.load(SeqCst))
|
||||
{
|
||||
tasks.push((attachment.call)(cx))
|
||||
}
|
||||
|
||||
tasks
|
||||
})?;
|
||||
|
||||
let attachments = join_all(attachment_tasks.into_iter()).await;
|
||||
|
||||
Ok(attachments
|
||||
.into_iter()
|
||||
.filter_map(|attachment| attachment.log_err())
|
||||
.collect())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn serialize_user_attachment(
|
||||
&self,
|
||||
user_attachment: &UserAttachment,
|
||||
) -> SavedUserAttachment {
|
||||
SavedUserAttachment {
|
||||
name: user_attachment.name.clone(),
|
||||
serialized_output: user_attachment.serialized_output.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn deserialize_user_attachment(
|
||||
&self,
|
||||
saved_user_attachment: SavedUserAttachment,
|
||||
cx: &mut WindowContext,
|
||||
) -> Result<UserAttachment> {
|
||||
if let Some(registered_attachment) = self
|
||||
.registered_attachments
|
||||
.values()
|
||||
.find(|attachment| attachment.name == saved_user_attachment.name)
|
||||
{
|
||||
(registered_attachment.deserialize)(&saved_user_attachment, cx)
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"no attachment tool for name {}",
|
||||
saved_user_attachment.name
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl UserAttachment {
|
||||
pub fn generate(&self, output: &mut ProjectContext, cx: &mut WindowContext) -> Option<String> {
|
||||
let result = (self.generate_fn)(self.view.clone(), output, cx);
|
||||
if result.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(result)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,296 +0,0 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use gpui::{AppContext, Model, Task, WeakModel};
|
||||
use project::{Fs, Project, ProjectPath, Worktree};
|
||||
use std::{cmp::Ordering, fmt::Write as _, ops::Range, sync::Arc};
|
||||
use sum_tree::TreeMap;
|
||||
|
||||
pub struct ProjectContext {
|
||||
files: TreeMap<ProjectPath, PathState>,
|
||||
project: WeakModel<Project>,
|
||||
fs: Arc<dyn Fs>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum PathState {
|
||||
PathOnly,
|
||||
EntireFile,
|
||||
Excerpts { ranges: Vec<Range<usize>> },
|
||||
}
|
||||
|
||||
impl ProjectContext {
|
||||
pub fn new(project: WeakModel<Project>, fs: Arc<dyn Fs>) -> Self {
|
||||
Self {
|
||||
files: TreeMap::default(),
|
||||
fs,
|
||||
project,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_path(&mut self, project_path: ProjectPath) {
|
||||
if self.files.get(&project_path).is_none() {
|
||||
self.files.insert(project_path, PathState::PathOnly);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_excerpts(&mut self, project_path: ProjectPath, new_ranges: &[Range<usize>]) {
|
||||
let previous_state = self
|
||||
.files
|
||||
.get(&project_path)
|
||||
.unwrap_or(&PathState::PathOnly);
|
||||
|
||||
let mut ranges = match previous_state {
|
||||
PathState::EntireFile => return,
|
||||
PathState::PathOnly => Vec::new(),
|
||||
PathState::Excerpts { ranges } => ranges.to_vec(),
|
||||
};
|
||||
|
||||
for new_range in new_ranges {
|
||||
let ix = ranges.binary_search_by(|probe| {
|
||||
if probe.end < new_range.start {
|
||||
Ordering::Less
|
||||
} else if probe.start > new_range.end {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
Ordering::Equal
|
||||
}
|
||||
});
|
||||
|
||||
match ix {
|
||||
Ok(mut ix) => {
|
||||
let existing = &mut ranges[ix];
|
||||
existing.start = existing.start.min(new_range.start);
|
||||
existing.end = existing.end.max(new_range.end);
|
||||
while ix + 1 < ranges.len() && ranges[ix + 1].start <= ranges[ix].end {
|
||||
ranges[ix].end = ranges[ix].end.max(ranges[ix + 1].end);
|
||||
ranges.remove(ix + 1);
|
||||
}
|
||||
while ix > 0 && ranges[ix - 1].end >= ranges[ix].start {
|
||||
ranges[ix].start = ranges[ix].start.min(ranges[ix - 1].start);
|
||||
ranges.remove(ix - 1);
|
||||
ix -= 1;
|
||||
}
|
||||
}
|
||||
Err(ix) => {
|
||||
ranges.insert(ix, new_range.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.files
|
||||
.insert(project_path, PathState::Excerpts { ranges });
|
||||
}
|
||||
|
||||
pub fn add_file(&mut self, project_path: ProjectPath) {
|
||||
self.files.insert(project_path, PathState::EntireFile);
|
||||
}
|
||||
|
||||
pub fn generate_system_message(&self, cx: &mut AppContext) -> Task<Result<String>> {
|
||||
let project = self
|
||||
.project
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("project dropped"));
|
||||
let files = self.files.clone();
|
||||
let fs = self.fs.clone();
|
||||
cx.spawn(|cx| async move {
|
||||
let project = project?;
|
||||
let mut result = "project structure:\n".to_string();
|
||||
|
||||
let mut last_worktree: Option<Model<Worktree>> = None;
|
||||
for (project_path, path_state) in files.iter() {
|
||||
if let Some(worktree) = &last_worktree {
|
||||
if worktree.read_with(&cx, |tree, _| tree.id())? != project_path.worktree_id {
|
||||
last_worktree = None;
|
||||
}
|
||||
}
|
||||
|
||||
let worktree;
|
||||
if let Some(last_worktree) = &last_worktree {
|
||||
worktree = last_worktree.clone();
|
||||
} else if let Some(tree) = project.read_with(&cx, |project, cx| {
|
||||
project.worktree_for_id(project_path.worktree_id, cx)
|
||||
})? {
|
||||
worktree = tree;
|
||||
last_worktree = Some(worktree.clone());
|
||||
let worktree_name =
|
||||
worktree.read_with(&cx, |tree, _cx| tree.root_name().to_string())?;
|
||||
writeln!(&mut result, "# {}", worktree_name).unwrap();
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
|
||||
let worktree_abs_path = worktree.read_with(&cx, |tree, _cx| tree.abs_path())?;
|
||||
let path = &project_path.path;
|
||||
writeln!(&mut result, "## {}", path.display()).unwrap();
|
||||
|
||||
match path_state {
|
||||
PathState::PathOnly => {}
|
||||
PathState::EntireFile => {
|
||||
let text = fs.load(&worktree_abs_path.join(&path)).await?;
|
||||
writeln!(&mut result, "~~~\n{text}\n~~~").unwrap();
|
||||
}
|
||||
PathState::Excerpts { ranges } => {
|
||||
let text = fs.load(&worktree_abs_path.join(&path)).await?;
|
||||
|
||||
writeln!(&mut result, "~~~").unwrap();
|
||||
|
||||
// Assumption: ranges are in order, not overlapping
|
||||
let mut prev_range_end = 0;
|
||||
for range in ranges {
|
||||
if range.start > prev_range_end {
|
||||
writeln!(&mut result, "...").unwrap();
|
||||
prev_range_end = range.end;
|
||||
}
|
||||
|
||||
let mut start = range.start;
|
||||
let mut end = range.end.min(text.len());
|
||||
while !text.is_char_boundary(start) {
|
||||
start += 1;
|
||||
}
|
||||
while !text.is_char_boundary(end) {
|
||||
end -= 1;
|
||||
}
|
||||
result.push_str(&text[start..end]);
|
||||
if !result.ends_with('\n') {
|
||||
result.push('\n');
|
||||
}
|
||||
}
|
||||
|
||||
if prev_range_end < text.len() {
|
||||
writeln!(&mut result, "...").unwrap();
|
||||
}
|
||||
|
||||
writeln!(&mut result, "~~~").unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use super::*;
|
||||
use gpui::TestAppContext;
|
||||
use project::FakeFs;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
|
||||
use unindent::Unindent as _;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_system_message_generation(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let file_3_contents = r#"
|
||||
fn test1() {}
|
||||
fn test2() {}
|
||||
fn test3() {}
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/code",
|
||||
json!({
|
||||
"root1": {
|
||||
"lib": {
|
||||
"file1.rs": "mod example;",
|
||||
"file2.rs": "",
|
||||
},
|
||||
"test": {
|
||||
"file3.rs": file_3_contents,
|
||||
}
|
||||
},
|
||||
"root2": {
|
||||
"src": {
|
||||
"main.rs": ""
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(
|
||||
fs.clone(),
|
||||
["/code/root1".as_ref(), "/code/root2".as_ref()],
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
let worktree_ids = project.read_with(cx, |project, cx| {
|
||||
project
|
||||
.worktrees()
|
||||
.map(|worktree| worktree.read(cx).id())
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
let mut ax = ProjectContext::new(project.downgrade(), fs);
|
||||
|
||||
ax.add_file(ProjectPath {
|
||||
worktree_id: worktree_ids[0],
|
||||
path: Path::new("lib/file1.rs").into(),
|
||||
});
|
||||
|
||||
let message = cx
|
||||
.update(|cx| ax.generate_system_message(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
r#"
|
||||
project structure:
|
||||
# root1
|
||||
## lib/file1.rs
|
||||
~~~
|
||||
mod example;
|
||||
~~~
|
||||
"#
|
||||
.unindent(),
|
||||
message
|
||||
);
|
||||
|
||||
ax.add_excerpts(
|
||||
ProjectPath {
|
||||
worktree_id: worktree_ids[0],
|
||||
path: Path::new("test/file3.rs").into(),
|
||||
},
|
||||
&[
|
||||
file_3_contents.find("fn test2").unwrap()
|
||||
..file_3_contents.find("fn test3").unwrap(),
|
||||
],
|
||||
);
|
||||
|
||||
let message = cx
|
||||
.update(|cx| ax.generate_system_message(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
r#"
|
||||
project structure:
|
||||
# root1
|
||||
## lib/file1.rs
|
||||
~~~
|
||||
mod example;
|
||||
~~~
|
||||
## test/file3.rs
|
||||
~~~
|
||||
...
|
||||
fn test2() {}
|
||||
...
|
||||
~~~
|
||||
"#
|
||||
.unindent(),
|
||||
message
|
||||
);
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,526 +0,0 @@
|
||||
use crate::ProjectContext;
|
||||
use anyhow::{anyhow, Result};
|
||||
use gpui::{AnyElement, AnyView, IntoElement, Render, Task, View, WindowContext};
|
||||
use repair_json::repair;
|
||||
use schemars::{schema::RootSchema, schema_for, JsonSchema};
|
||||
use serde::{de::DeserializeOwned, Deserialize, Serialize};
|
||||
use serde_json::value::RawValue;
|
||||
use std::{
|
||||
any::TypeId,
|
||||
collections::HashMap,
|
||||
fmt::Display,
|
||||
mem,
|
||||
sync::atomic::{AtomicBool, Ordering::SeqCst},
|
||||
};
|
||||
use ui::ViewContext;
|
||||
|
||||
pub struct ToolRegistry {
|
||||
registered_tools: HashMap<String, RegisteredTool>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct ToolFunctionCall {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub arguments: String,
|
||||
state: ToolFunctionCallState,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
enum ToolFunctionCallState {
|
||||
#[default]
|
||||
Initializing,
|
||||
NoSuchTool,
|
||||
KnownTool(Box<dyn InternalToolView>),
|
||||
ExecutedTool(Box<dyn InternalToolView>),
|
||||
}
|
||||
|
||||
trait InternalToolView {
|
||||
fn view(&self) -> AnyView;
|
||||
fn generate(&self, project: &mut ProjectContext, cx: &mut WindowContext) -> String;
|
||||
fn try_set_input(&self, input: &str, cx: &mut WindowContext);
|
||||
fn execute(&self, cx: &mut WindowContext) -> Task<Result<()>>;
|
||||
fn serialize_output(&self, cx: &mut WindowContext) -> Result<Box<RawValue>>;
|
||||
fn deserialize_output(&self, raw_value: &RawValue, cx: &mut WindowContext) -> Result<()>;
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize, Deserialize)]
|
||||
pub struct SavedToolFunctionCall {
|
||||
id: String,
|
||||
name: String,
|
||||
arguments: String,
|
||||
state: SavedToolFunctionCallState,
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize, Deserialize)]
|
||||
enum SavedToolFunctionCallState {
|
||||
#[default]
|
||||
Initializing,
|
||||
NoSuchTool,
|
||||
KnownTool,
|
||||
ExecutedTool(Box<RawValue>),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub struct ToolFunctionDefinition {
|
||||
pub name: String,
|
||||
pub description: String,
|
||||
pub parameters: RootSchema,
|
||||
}
|
||||
|
||||
pub trait LanguageModelTool {
|
||||
type View: ToolView;
|
||||
|
||||
/// Returns the name of the tool.
|
||||
///
|
||||
/// This name is exposed to the language model to allow the model to pick
|
||||
/// which tools to use. As this name is used to identify the tool within a
|
||||
/// tool registry, it should be unique.
|
||||
fn name(&self) -> String;
|
||||
|
||||
/// Returns the description of the tool.
|
||||
///
|
||||
/// This can be used to _prompt_ the model as to what the tool does.
|
||||
fn description(&self) -> String;
|
||||
|
||||
/// Returns the OpenAI Function definition for the tool, for direct use with OpenAI's API.
|
||||
fn definition(&self) -> ToolFunctionDefinition {
|
||||
let root_schema = schema_for!(<Self::View as ToolView>::Input);
|
||||
|
||||
ToolFunctionDefinition {
|
||||
name: self.name(),
|
||||
description: self.description(),
|
||||
parameters: root_schema,
|
||||
}
|
||||
}
|
||||
|
||||
/// A view of the output of running the tool, for displaying to the user.
|
||||
fn view(&self, cx: &mut WindowContext) -> View<Self::View>;
|
||||
}
|
||||
|
||||
pub trait ToolView: Render {
|
||||
/// The input type that will be passed in to `execute` when the tool is called
|
||||
/// by the language model.
|
||||
type Input: DeserializeOwned + JsonSchema;
|
||||
|
||||
/// The output returned by executing the tool.
|
||||
type SerializedState: DeserializeOwned + Serialize;
|
||||
|
||||
fn generate(&self, project: &mut ProjectContext, cx: &mut ViewContext<Self>) -> String;
|
||||
fn set_input(&mut self, input: Self::Input, cx: &mut ViewContext<Self>);
|
||||
fn execute(&mut self, cx: &mut ViewContext<Self>) -> Task<Result<()>>;
|
||||
|
||||
fn serialize(&self, cx: &mut ViewContext<Self>) -> Self::SerializedState;
|
||||
fn deserialize(
|
||||
&mut self,
|
||||
output: Self::SerializedState,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Result<()>;
|
||||
}
|
||||
|
||||
struct RegisteredTool {
|
||||
enabled: AtomicBool,
|
||||
type_id: TypeId,
|
||||
build_view: Box<dyn Fn(&mut WindowContext) -> Box<dyn InternalToolView>>,
|
||||
definition: ToolFunctionDefinition,
|
||||
}
|
||||
|
||||
impl ToolRegistry {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
registered_tools: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_tool_enabled<T: 'static + LanguageModelTool>(&self, is_enabled: bool) {
|
||||
for tool in self.registered_tools.values() {
|
||||
if tool.type_id == TypeId::of::<T>() {
|
||||
tool.enabled.store(is_enabled, SeqCst);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_tool_enabled<T: 'static + LanguageModelTool>(&self) -> bool {
|
||||
for tool in self.registered_tools.values() {
|
||||
if tool.type_id == TypeId::of::<T>() {
|
||||
return tool.enabled.load(SeqCst);
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
pub fn definitions(&self) -> Vec<ToolFunctionDefinition> {
|
||||
self.registered_tools
|
||||
.values()
|
||||
.filter(|tool| tool.enabled.load(SeqCst))
|
||||
.map(|tool| tool.definition.clone())
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn update_tool_call(
|
||||
&self,
|
||||
call: &mut ToolFunctionCall,
|
||||
name: Option<&str>,
|
||||
arguments: Option<&str>,
|
||||
cx: &mut WindowContext,
|
||||
) {
|
||||
if let Some(name) = name {
|
||||
call.name.push_str(name);
|
||||
}
|
||||
if let Some(arguments) = arguments {
|
||||
if call.arguments.is_empty() {
|
||||
if let Some(tool) = self.registered_tools.get(&call.name) {
|
||||
let view = (tool.build_view)(cx);
|
||||
call.state = ToolFunctionCallState::KnownTool(view);
|
||||
} else {
|
||||
call.state = ToolFunctionCallState::NoSuchTool;
|
||||
}
|
||||
}
|
||||
call.arguments.push_str(arguments);
|
||||
|
||||
if let ToolFunctionCallState::KnownTool(view) = &call.state {
|
||||
if let Ok(repaired_arguments) = repair(call.arguments.clone()) {
|
||||
view.try_set_input(&repaired_arguments, cx)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
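
`update_tool_call` is fed the streamed deltas of an OpenAI-style function call: name fragments are concatenated, the first argument fragment decides whether the tool name resolved to a registered tool, and every subsequent fragment is appended and re-parsed (the real code first runs the buffer through `repair_json::repair` so a partially streamed object can already populate the view). A small sketch of that accumulation pattern, assuming `serde`/`serde_json` with the derive feature as used elsewhere in these crates; the sketch skips the repair step and simply retries a plain parse until the JSON happens to be complete:

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct WeatherQuery {
    location: String,
    unit: String,
}

#[derive(Default)]
struct StreamedCall {
    name: String,
    arguments: String,
    parsed: Option<WeatherQuery>,
}

impl StreamedCall {
    // Mirrors the shape of `update_tool_call`: name and argument fragments
    // arrive incrementally; after each fragment we try to parse what we have.
    // The registry above uses `repair_json::repair` so an incomplete buffer
    // can already be parsed; here we keep retrying until the JSON closes.
    fn update(&mut self, name: Option<&str>, arguments: Option<&str>) {
        if let Some(name) = name {
            self.name.push_str(name);
        }
        if let Some(arguments) = arguments {
            self.arguments.push_str(arguments);
        }
        if let Ok(input) = serde_json::from_str::<WeatherQuery>(&self.arguments) {
            self.parsed = Some(input);
        }
    }
}

fn main() {
    let mut call = StreamedCall::default();
    call.update(Some("get_cur"), Some(r#"{"location": "San Francisco","#));
    call.update(Some("rent_weather"), Some(r#" "unit": "Celsius"}"#));
    assert_eq!(call.name, "get_current_weather");
    println!("{:?}", call.parsed);
}
```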
|
||||
pub fn execute_tool_call(
|
||||
&self,
|
||||
tool_call: &mut ToolFunctionCall,
|
||||
cx: &mut WindowContext,
|
||||
) -> Option<Task<Result<()>>> {
|
||||
if let ToolFunctionCallState::KnownTool(view) = mem::take(&mut tool_call.state) {
|
||||
let task = view.execute(cx);
|
||||
tool_call.state = ToolFunctionCallState::ExecutedTool(view);
|
||||
Some(task)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn render_tool_call(
|
||||
&self,
|
||||
tool_call: &ToolFunctionCall,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Option<AnyElement> {
|
||||
match &tool_call.state {
|
||||
ToolFunctionCallState::NoSuchTool => {
|
||||
Some(ui::Label::new("No such tool").into_any_element())
|
||||
}
|
||||
ToolFunctionCallState::Initializing => None,
|
||||
ToolFunctionCallState::KnownTool(view) | ToolFunctionCallState::ExecutedTool(view) => {
|
||||
Some(view.view().into_any_element())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn content_for_tool_call(
|
||||
&self,
|
||||
tool_call: &ToolFunctionCall,
|
||||
project_context: &mut ProjectContext,
|
||||
cx: &mut WindowContext,
|
||||
) -> String {
|
||||
match &tool_call.state {
|
||||
ToolFunctionCallState::Initializing => String::new(),
|
||||
ToolFunctionCallState::NoSuchTool => {
|
||||
format!("No such tool: {}", tool_call.name)
|
||||
}
|
||||
ToolFunctionCallState::KnownTool(view) | ToolFunctionCallState::ExecutedTool(view) => {
|
||||
view.generate(project_context, cx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn serialize_tool_call(
|
||||
&self,
|
||||
call: &ToolFunctionCall,
|
||||
cx: &mut WindowContext,
|
||||
) -> Result<SavedToolFunctionCall> {
|
||||
Ok(SavedToolFunctionCall {
|
||||
id: call.id.clone(),
|
||||
name: call.name.clone(),
|
||||
arguments: call.arguments.clone(),
|
||||
state: match &call.state {
|
||||
ToolFunctionCallState::Initializing => SavedToolFunctionCallState::Initializing,
|
||||
ToolFunctionCallState::NoSuchTool => SavedToolFunctionCallState::NoSuchTool,
|
||||
ToolFunctionCallState::KnownTool(_) => SavedToolFunctionCallState::KnownTool,
|
||||
ToolFunctionCallState::ExecutedTool(view) => {
|
||||
SavedToolFunctionCallState::ExecutedTool(view.serialize_output(cx)?)
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
pub fn deserialize_tool_call(
|
||||
&self,
|
||||
call: &SavedToolFunctionCall,
|
||||
cx: &mut WindowContext,
|
||||
) -> Result<ToolFunctionCall> {
|
||||
let Some(tool) = self.registered_tools.get(&call.name) else {
|
||||
return Err(anyhow!("no such tool {}", call.name));
|
||||
};
|
||||
|
||||
Ok(ToolFunctionCall {
|
||||
id: call.id.clone(),
|
||||
name: call.name.clone(),
|
||||
arguments: call.arguments.clone(),
|
||||
state: match &call.state {
|
||||
SavedToolFunctionCallState::Initializing => ToolFunctionCallState::Initializing,
|
||||
SavedToolFunctionCallState::NoSuchTool => ToolFunctionCallState::NoSuchTool,
|
||||
SavedToolFunctionCallState::KnownTool => {
|
||||
log::error!("Deserialized tool that had not executed");
|
||||
let view = (tool.build_view)(cx);
|
||||
view.try_set_input(&call.arguments, cx);
|
||||
ToolFunctionCallState::KnownTool(view)
|
||||
}
|
||||
SavedToolFunctionCallState::ExecutedTool(output) => {
|
||||
let view = (tool.build_view)(cx);
|
||||
view.try_set_input(&call.arguments, cx);
|
||||
view.deserialize_output(output, cx)?;
|
||||
ToolFunctionCallState::ExecutedTool(view)
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
pub fn register<T: 'static + LanguageModelTool>(&mut self, tool: T) -> Result<()> {
|
||||
let name = tool.name();
|
||||
let registered_tool = RegisteredTool {
|
||||
type_id: TypeId::of::<T>(),
|
||||
definition: tool.definition(),
|
||||
enabled: AtomicBool::new(true),
|
||||
build_view: Box::new(move |cx: &mut WindowContext| Box::new(tool.view(cx))),
|
||||
};
|
||||
|
||||
let previous = self.registered_tools.insert(name.clone(), registered_tool);
|
||||
if previous.is_some() {
|
||||
return Err(anyhow!("already registered a tool with name {}", name));
|
||||
}
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
}
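
`serialize_tool_call` and `deserialize_tool_call` round-trip a call through `SavedToolFunctionCall`, so the on-disk form is just whatever serde produces for that struct. A simplified, self-contained mirror (local stand-in types, with the `Box<RawValue>` payload of `ExecutedTool` replaced by plain JSON) shows the externally tagged enum representation serde emits by default, which is the shape the match arms above are written against:

```rust
use serde::{Deserialize, Serialize};

// Local stand-ins for `SavedToolFunctionCall` / `SavedToolFunctionCallState`.
#[derive(Debug, Serialize, Deserialize)]
struct SavedCall {
    id: String,
    name: String,
    arguments: String,
    state: SavedState,
}

#[derive(Debug, Serialize, Deserialize)]
enum SavedState {
    Initializing,
    NoSuchTool,
    KnownTool,
    // The real variant carries the tool view's serialized output as a RawValue.
    ExecutedTool(serde_json::Value),
}

fn main() -> serde_json::Result<()> {
    let saved = SavedCall {
        id: "the-id".into(),
        name: "get_current_weather".into(),
        arguments: r#"{"location": "San Francisco", "unit": "Celsius"}"#.into(),
        state: SavedState::ExecutedTool(serde_json::json!({ "temperature": 21.0 })),
    };
    // Externally tagged by default, e.g. `"state": {"ExecutedTool": {...}}`.
    let json = serde_json::to_string_pretty(&saved)?;
    println!("{json}");
    let restored: SavedCall = serde_json::from_str(&json)?;
    println!("{:?}", restored.state);
    Ok(())
}
```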
|
||||
impl<T: ToolView> InternalToolView for View<T> {
|
||||
fn view(&self) -> AnyView {
|
||||
self.clone().into()
|
||||
}
|
||||
|
||||
fn generate(&self, project: &mut ProjectContext, cx: &mut WindowContext) -> String {
|
||||
self.update(cx, |view, cx| view.generate(project, cx))
|
||||
}
|
||||
|
||||
fn try_set_input(&self, input: &str, cx: &mut WindowContext) {
|
||||
if let Ok(input) = serde_json::from_str::<T::Input>(input) {
|
||||
self.update(cx, |view, cx| {
|
||||
view.set_input(input, cx);
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn execute(&self, cx: &mut WindowContext) -> Task<Result<()>> {
|
||||
self.update(cx, |view, cx| view.execute(cx))
|
||||
}
|
||||
|
||||
fn serialize_output(&self, cx: &mut WindowContext) -> Result<Box<RawValue>> {
|
||||
let output = self.update(cx, |view, cx| view.serialize(cx));
|
||||
Ok(RawValue::from_string(serde_json::to_string(&output)?)?)
|
||||
}
|
||||
|
||||
fn deserialize_output(&self, output: &RawValue, cx: &mut WindowContext) -> Result<()> {
|
||||
let state = serde_json::from_str::<T::SerializedState>(output.get())?;
|
||||
self.update(cx, |view, cx| view.deserialize(state, cx))?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for ToolFunctionDefinition {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let schema = serde_json::to_string(&self.parameters).ok();
|
||||
let schema = schema.unwrap_or("None".to_string());
|
||||
write!(f, "Name: {}:\n", self.name)?;
|
||||
write!(f, "Description: {}\n", self.description)?;
|
||||
write!(f, "Parameters: {}", schema)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use gpui::{div, prelude::*, Render, TestAppContext};
|
||||
use gpui::{EmptyView, View};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
|
||||
#[derive(Deserialize, Serialize, JsonSchema)]
|
||||
struct WeatherQuery {
|
||||
location: String,
|
||||
unit: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, PartialEq, Debug)]
|
||||
struct WeatherResult {
|
||||
location: String,
|
||||
temperature: f64,
|
||||
unit: String,
|
||||
}
|
||||
|
||||
struct WeatherView {
|
||||
input: Option<WeatherQuery>,
|
||||
result: Option<WeatherResult>,
|
||||
|
||||
// Fake API call
|
||||
current_weather: WeatherResult,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize)]
|
||||
struct WeatherTool {
|
||||
current_weather: WeatherResult,
|
||||
}
|
||||
|
||||
impl WeatherView {
|
||||
fn new(current_weather: WeatherResult) -> Self {
|
||||
Self {
|
||||
input: None,
|
||||
result: None,
|
||||
current_weather,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for WeatherView {
|
||||
fn render(&mut self, _cx: &mut gpui::ViewContext<Self>) -> impl IntoElement {
|
||||
match self.result {
|
||||
Some(ref result) => div()
|
||||
.child(format!("temperature: {}", result.temperature))
|
||||
.into_any_element(),
|
||||
None => div().child("Calculating weather...").into_any_element(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToolView for WeatherView {
|
||||
type Input = WeatherQuery;
|
||||
|
||||
type SerializedState = WeatherResult;
|
||||
|
||||
fn generate(&self, _output: &mut ProjectContext, _cx: &mut ViewContext<Self>) -> String {
|
||||
serde_json::to_string(&self.result).unwrap()
|
||||
}
|
||||
|
||||
fn set_input(&mut self, input: Self::Input, cx: &mut ViewContext<Self>) {
|
||||
self.input = Some(input);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn execute(&mut self, _cx: &mut ViewContext<Self>) -> Task<Result<()>> {
|
||||
let input = self.input.as_ref().unwrap();
|
||||
|
||||
let _location = input.location.clone();
|
||||
let _unit = input.unit.clone();
|
||||
|
||||
let weather = self.current_weather.clone();
|
||||
|
||||
self.result = Some(weather);
|
||||
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
|
||||
fn serialize(&self, _cx: &mut ViewContext<Self>) -> Self::SerializedState {
|
||||
self.current_weather.clone()
|
||||
}
|
||||
|
||||
fn deserialize(
|
||||
&mut self,
|
||||
output: Self::SerializedState,
|
||||
_cx: &mut ViewContext<Self>,
|
||||
) -> Result<()> {
|
||||
self.current_weather = output;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageModelTool for WeatherTool {
|
||||
type View = WeatherView;
|
||||
|
||||
fn name(&self) -> String {
|
||||
"get_current_weather".to_string()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Fetches the current weather for a given location.".to_string()
|
||||
}
|
||||
|
||||
fn view(&self, cx: &mut WindowContext) -> View<Self::View> {
|
||||
cx.new_view(|_cx| WeatherView::new(self.current_weather.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_openai_weather_example(cx: &mut TestAppContext) {
|
||||
let (_, cx) = cx.add_window_view(|_cx| EmptyView);
|
||||
|
||||
let mut registry = ToolRegistry::new();
|
||||
registry
|
||||
.register(WeatherTool {
|
||||
current_weather: WeatherResult {
|
||||
location: "San Francisco".to_string(),
|
||||
temperature: 21.0,
|
||||
unit: "Celsius".to_string(),
|
||||
},
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
let definitions = registry.definitions();
|
||||
assert_eq!(
|
||||
definitions,
|
||||
[ToolFunctionDefinition {
|
||||
name: "get_current_weather".to_string(),
|
||||
description: "Fetches the current weather for a given location.".to_string(),
|
||||
parameters: serde_json::from_value(json!({
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "WeatherQuery",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "string"
|
||||
},
|
||||
"unit": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": ["location", "unit"]
|
||||
}))
|
||||
.unwrap(),
|
||||
}]
|
||||
);
|
||||
|
||||
let mut call = ToolFunctionCall {
|
||||
id: "the-id".to_string(),
|
||||
name: "get_cur".to_string(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let task = cx.update(|cx| {
|
||||
registry.update_tool_call(
|
||||
&mut call,
|
||||
Some("rent_weather"),
|
||||
Some(r#"{"location": "San Francisco","#),
|
||||
cx,
|
||||
);
|
||||
registry.update_tool_call(&mut call, None, Some(r#" "unit": "Celsius"}"#), cx);
|
||||
registry.execute_tool_call(&mut call, cx).unwrap()
|
||||
});
|
||||
task.await.unwrap();
|
||||
|
||||
match &call.state {
|
||||
ToolFunctionCallState::ExecutedTool(_view) => {}
|
||||
_ => panic!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -18,11 +18,12 @@ client.workspace = true
|
||||
db.workspace = true
|
||||
editor.workspace = true
|
||||
gpui.workspace = true
|
||||
http.workspace = true
|
||||
http_client.workspace = true
|
||||
isahc.workspace = true
|
||||
log.workspace = true
|
||||
markdown_preview.workspace = true
|
||||
menu.workspace = true
|
||||
paths.workspace = true
|
||||
release_channel.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
mod update_notification;
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use client::{Client, TelemetrySettings, ZED_APP_PATH};
|
||||
use client::{Client, TelemetrySettings};
|
||||
use db::kvp::KEY_VALUE_STORE;
|
||||
use db::RELEASE_CHANNEL;
|
||||
use editor::{Editor, MultiBuffer};
|
||||
@@ -20,7 +20,7 @@ use smol::{fs, io::AsyncReadExt};
|
||||
use settings::{Settings, SettingsSources, SettingsStore};
|
||||
use smol::{fs::File, process::Command};
|
||||
|
||||
use http::{HttpClient, HttpClientWithUrl};
|
||||
use http_client::{HttpClient, HttpClientWithUrl};
|
||||
use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
|
||||
use std::{
|
||||
env::{
|
||||
@@ -28,7 +28,7 @@ use std::{
|
||||
consts::{ARCH, OS},
|
||||
},
|
||||
ffi::OsString,
|
||||
path::PathBuf,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
@@ -288,7 +288,12 @@ fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Wo
|
||||
Some(tab_description),
|
||||
cx,
|
||||
);
|
||||
workspace.add_item_to_active_pane(Box::new(view.clone()), None, cx);
|
||||
workspace.add_item_to_active_pane(
|
||||
Box::new(view.clone()),
|
||||
None,
|
||||
true,
|
||||
cx,
|
||||
);
|
||||
cx.notify();
|
||||
})
|
||||
.log_err();
|
||||
@@ -354,7 +359,6 @@ impl AutoUpdater {
|
||||
return;
|
||||
}
|
||||
|
||||
self.status = AutoUpdateStatus::Checking;
|
||||
cx.notify();
|
||||
|
||||
self.pending_poll = Some(cx.spawn(|this, mut cx| async move {
|
||||
@@ -380,29 +384,65 @@ impl AutoUpdater {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
async fn update(this: Model<Self>, mut cx: AsyncAppContext) -> Result<()> {
|
||||
let (client, current_version) = this.read_with(&cx, |this, _| {
|
||||
(this.http_client.clone(), this.current_version)
|
||||
})?;
|
||||
pub async fn get_latest_remote_server_release(
|
||||
os: &str,
|
||||
arch: &str,
|
||||
mut release_channel: ReleaseChannel,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<PathBuf> {
|
||||
let this = cx.update(|cx| {
|
||||
cx.default_global::<GlobalAutoUpdate>()
|
||||
.0
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("auto-update not initialized"))
|
||||
})??;
|
||||
|
||||
let asset = match OS {
|
||||
"linux" => format!("zed-linux-{}.tar.gz", ARCH),
|
||||
"macos" => "Zed.dmg".into(),
|
||||
_ => return Err(anyhow!("auto-update not supported for OS {:?}", OS)),
|
||||
};
|
||||
if release_channel == ReleaseChannel::Dev {
|
||||
release_channel = ReleaseChannel::Nightly;
|
||||
}
|
||||
|
||||
let release = Self::get_latest_release(
|
||||
&this,
|
||||
"zed-remote-server",
|
||||
os,
|
||||
arch,
|
||||
Some(release_channel),
|
||||
cx,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let servers_dir = paths::remote_servers_dir();
|
||||
let channel_dir = servers_dir.join(release_channel.dev_name());
|
||||
let platform_dir = channel_dir.join(format!("{}-{}", os, arch));
|
||||
let version_path = platform_dir.join(format!("{}.gz", release.version));
|
||||
smol::fs::create_dir_all(&platform_dir).await.ok();
|
||||
|
||||
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
|
||||
if smol::fs::metadata(&version_path).await.is_err() {
|
||||
log::info!("downloading zed-remote-server {os} {arch}");
|
||||
download_remote_server_binary(&version_path, release, client, cx).await?;
|
||||
}
|
||||
|
||||
Ok(version_path)
|
||||
}
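
`get_latest_remote_server_release` caches downloaded remote-server archives under a per-channel, per-platform directory keyed by release version, so a version is only downloaded once. A std-only sketch of that path layout; the directory components come from the code above, while the base directory stands in for whatever `paths::remote_servers_dir()` returns and the version string is illustrative:

```rust
use std::path::{Path, PathBuf};

// Mirrors the cache layout used above:
// <remote_servers_dir>/<channel>/<os>-<arch>/<version>.gz
fn remote_server_cache_path(
    servers_dir: &Path,
    channel: &str,
    os: &str,
    arch: &str,
    version: &str,
) -> PathBuf {
    servers_dir
        .join(channel)
        .join(format!("{os}-{arch}"))
        .join(format!("{version}.gz"))
}

fn main() {
    let path = remote_server_cache_path(
        Path::new("/home/me/.local/share/zed/remote_servers"), // placeholder base dir
        "nightly",
        "linux",
        "x86_64",
        "0.142.0", // illustrative version
    );
    assert_eq!(
        path,
        Path::new("/home/me/.local/share/zed/remote_servers/nightly/linux-x86_64/0.142.0.gz")
    );
    println!("{}", path.display());
}
```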
|
||||
async fn get_latest_release(
|
||||
this: &Model<Self>,
|
||||
asset: &str,
|
||||
os: &str,
|
||||
arch: &str,
|
||||
release_channel: Option<ReleaseChannel>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<JsonRelease> {
|
||||
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
|
||||
let mut url_string = client.build_url(&format!(
|
||||
"/api/releases/latest?asset={}&os={}&arch={}",
|
||||
asset, OS, ARCH
|
||||
asset, os, arch
|
||||
));
|
||||
cx.update(|cx| {
|
||||
if let Some(param) = ReleaseChannel::try_global(cx)
|
||||
.and_then(|release_channel| release_channel.release_query_param())
|
||||
{
|
||||
url_string += "&";
|
||||
url_string += param;
|
||||
}
|
||||
})?;
|
||||
if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
|
||||
url_string += "&";
|
||||
url_string += param;
|
||||
}
|
||||
|
||||
let mut response = client.get(&url_string, Default::default(), true).await?;
|
||||
|
||||
@@ -413,8 +453,34 @@ impl AutoUpdater {
|
||||
.await
|
||||
.context("error reading release")?;
|
||||
|
||||
let release: JsonRelease =
|
||||
serde_json::from_slice(body.as_slice()).context("error deserializing release")?;
|
||||
if !response.status().is_success() {
|
||||
Err(anyhow!(
|
||||
"failed to fetch release: {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
))?;
|
||||
}
|
||||
|
||||
serde_json::from_slice(body.as_slice()).with_context(|| {
|
||||
format!(
|
||||
"error deserializing release {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
)
|
||||
})
|
||||
}
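
The release lookup is a plain GET against `/api/releases/latest` with `asset`, `os` and `arch` query parameters, and the channel parameter now comes from the explicit `release_channel` argument instead of the global. A sketch of just the URL construction; the host and the exact shape of the channel parameter are assumptions, not taken from the source:

```rust
// Builds the query string the updater sends; `channel_param` stands in for
// whatever `ReleaseChannel::release_query_param()` returns.
fn latest_release_url(
    base: &str,
    asset: &str,
    os: &str,
    arch: &str,
    channel_param: Option<&str>,
) -> String {
    let mut url = format!("{base}/api/releases/latest?asset={asset}&os={os}&arch={arch}");
    if let Some(param) = channel_param {
        url.push('&');
        url.push_str(param);
    }
    url
}

fn main() {
    let url = latest_release_url(
        "https://zed.dev",        // illustrative host
        "zed-remote-server",
        "linux",
        "x86_64",
        Some("preview=1"),        // assumed shape of the channel query parameter
    );
    assert_eq!(
        url,
        "https://zed.dev/api/releases/latest?asset=zed-remote-server&os=linux&arch=x86_64&preview=1"
    );
}
```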
|
||||
async fn update(this: Model<Self>, mut cx: AsyncAppContext) -> Result<()> {
|
||||
let (client, current_version, release_channel) = this.update(&mut cx, |this, cx| {
|
||||
this.status = AutoUpdateStatus::Checking;
|
||||
cx.notify();
|
||||
(
|
||||
this.http_client.clone(),
|
||||
this.current_version,
|
||||
ReleaseChannel::try_global(cx),
|
||||
)
|
||||
})?;
|
||||
|
||||
let release =
|
||||
Self::get_latest_release(&this, "zed", OS, ARCH, release_channel, &mut cx).await?;
|
||||
|
||||
let should_download = match *RELEASE_CHANNEL {
|
||||
ReleaseChannel::Nightly => cx
|
||||
@@ -441,19 +507,21 @@ impl AutoUpdater {
|
||||
let temp_dir = tempfile::Builder::new()
|
||||
.prefix("zed-auto-update")
|
||||
.tempdir()?;
|
||||
let downloaded_asset = download_release(&temp_dir, release, &asset, client, &cx).await?;
|
||||
|
||||
let filename = match OS {
|
||||
"macos" => Ok("Zed.dmg"),
|
||||
"linux" => Ok("zed.tar.gz"),
|
||||
_ => Err(anyhow!("not supported: {:?}", OS)),
|
||||
}?;
|
||||
let downloaded_asset = temp_dir.path().join(filename);
|
||||
download_release(&downloaded_asset, release, client, &cx).await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.status = AutoUpdateStatus::Installing;
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
// We store the path of our current binary, before we install, since installation might
|
||||
// delete it. Once deleted, it's hard to get the path to our binary on Linux.
|
||||
// So we cache it here, which allows us to then restart later on.
|
||||
let binary_path = cx.update(|cx| cx.app_path())??;
|
||||
|
||||
match OS {
|
||||
let binary_path = match OS {
|
||||
"macos" => install_release_macos(&temp_dir, downloaded_asset, &cx).await,
|
||||
"linux" => install_release_linux(&temp_dir, downloaded_asset, &cx).await,
|
||||
_ => Err(anyhow!("not supported: {:?}", OS)),
|
||||
@@ -500,14 +568,38 @@ impl AutoUpdater {
|
||||
}
|
||||
}
|
||||
|
||||
async fn download_release(
|
||||
temp_dir: &tempfile::TempDir,
|
||||
async fn download_remote_server_binary(
|
||||
target_path: &PathBuf,
|
||||
release: JsonRelease,
|
||||
target_filename: &str,
|
||||
client: Arc<HttpClientWithUrl>,
|
||||
cx: &AsyncAppContext,
|
||||
) -> Result<PathBuf> {
|
||||
let target_path = temp_dir.path().join(target_filename);
|
||||
) -> Result<()> {
|
||||
let mut target_file = File::create(&target_path).await?;
|
||||
let (installation_id, release_channel, telemetry) = cx.update(|cx| {
|
||||
let installation_id = Client::global(cx).telemetry().installation_id();
|
||||
let release_channel =
|
||||
ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
|
||||
let telemetry = TelemetrySettings::get_global(cx).metrics;
|
||||
|
||||
(installation_id, release_channel, telemetry)
|
||||
})?;
|
||||
let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry,
|
||||
})?);
|
||||
|
||||
let mut response = client.get(&release.url, request_body, true).await?;
|
||||
smol::io::copy(response.body_mut(), &mut target_file).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn download_release(
|
||||
target_path: &Path,
|
||||
release: JsonRelease,
|
||||
client: Arc<HttpClientWithUrl>,
|
||||
cx: &AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
let mut target_file = File::create(&target_path).await?;
|
||||
|
||||
let (installation_id, release_channel, telemetry) = cx.update(|cx| {
|
||||
@@ -529,16 +621,17 @@ async fn download_release(
|
||||
smol::io::copy(response.body_mut(), &mut target_file).await?;
|
||||
log::info!("downloaded update. path:{:?}", target_path);
|
||||
|
||||
Ok(target_path)
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn install_release_linux(
|
||||
temp_dir: &tempfile::TempDir,
|
||||
downloaded_tar_gz: PathBuf,
|
||||
cx: &AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
) -> Result<PathBuf> {
|
||||
let channel = cx.update(|cx| ReleaseChannel::global(cx).dev_name())?;
|
||||
let home_dir = PathBuf::from(env::var("HOME").context("no HOME env var set")?);
|
||||
let running_app_path = cx.update(|cx| cx.app_path())??;
|
||||
|
||||
let extracted = temp_dir.path().join("zed");
|
||||
fs::create_dir_all(&extracted)
|
||||
@@ -569,7 +662,16 @@ async fn install_release_linux(
|
||||
let app_folder_name = format!("zed{}.app", suffix);
|
||||
|
||||
let from = extracted.join(&app_folder_name);
|
||||
let to = home_dir.join(".local");
|
||||
let mut to = home_dir.join(".local");
|
||||
|
||||
let expected_suffix = format!("{}/libexec/zed-editor", app_folder_name);
|
||||
|
||||
if let Some(prefix) = running_app_path
|
||||
.to_str()
|
||||
.and_then(|str| str.strip_suffix(&expected_suffix))
|
||||
{
|
||||
to = PathBuf::from(prefix);
|
||||
}
|
||||
|
||||
let output = Command::new("rsync")
|
||||
.args(&["-av", "--delete"])
|
||||
@@ -586,17 +688,15 @@ async fn install_release_linux(
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
|
||||
Ok(())
|
||||
Ok(to.join(expected_suffix))
|
||||
}
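
On Linux the install prefix is now inferred from the running binary: if the executable path ends in `<app_folder>/libexec/zed-editor`, everything before that suffix becomes the rsync destination; otherwise it falls back to `~/.local`. A std-only sketch of that prefix inference (the function name and the example paths are illustrative):

```rust
use std::path::PathBuf;

// If the running binary lives at <prefix>/<app_folder>/libexec/zed-editor,
// install into <prefix>; otherwise fall back to the provided default.
fn install_prefix(running_app_path: &str, app_folder_name: &str, fallback: &str) -> PathBuf {
    let expected_suffix = format!("{app_folder_name}/libexec/zed-editor");
    match running_app_path.strip_suffix(&expected_suffix) {
        Some(prefix) => PathBuf::from(prefix),
        None => PathBuf::from(fallback),
    }
}

fn main() {
    let prefix = install_prefix(
        "/opt/zed/zed-preview.app/libexec/zed-editor", // illustrative path
        "zed-preview.app",
        "/home/me/.local",
    );
    assert_eq!(prefix, PathBuf::from("/opt/zed/"));
}
```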
|
||||
async fn install_release_macos(
|
||||
temp_dir: &tempfile::TempDir,
|
||||
downloaded_dmg: PathBuf,
|
||||
cx: &AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
let running_app_path = ZED_APP_PATH
|
||||
.clone()
|
||||
.map_or_else(|| cx.update(|cx| cx.app_path())?, Ok)?;
|
||||
) -> Result<PathBuf> {
|
||||
let running_app_path = cx.update(|cx| cx.app_path())??;
|
||||
let running_app_filename = running_app_path
|
||||
.file_name()
|
||||
.ok_or_else(|| anyhow!("invalid running app path"))?;
|
||||
@@ -644,5 +744,5 @@ async fn install_release_macos(
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
|
||||
Ok(())
|
||||
Ok(running_app_path)
|
||||
}
|
||||
|
||||
@@ -113,29 +113,30 @@ impl ToolbarItemView for Breadcrumbs {
|
||||
) -> ToolbarItemLocation {
|
||||
cx.notify();
|
||||
self.active_item = None;
|
||||
if let Some(item) = active_pane_item {
|
||||
let this = cx.view().downgrade();
|
||||
self.subscription = Some(item.subscribe_to_item_events(
|
||||
cx,
|
||||
Box::new(move |event, cx| {
|
||||
if let ItemEvent::UpdateBreadcrumbs = event {
|
||||
this.update(cx, |this, cx| {
|
||||
cx.notify();
|
||||
if let Some(active_item) = this.active_item.as_ref() {
|
||||
cx.emit(ToolbarItemEvent::ChangeLocation(
|
||||
active_item.breadcrumb_location(cx),
|
||||
))
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}),
|
||||
));
|
||||
self.active_item = Some(item.boxed_clone());
|
||||
item.breadcrumb_location(cx)
|
||||
} else {
|
||||
ToolbarItemLocation::Hidden
|
||||
}
|
||||
|
||||
let Some(item) = active_pane_item else {
|
||||
return ToolbarItemLocation::Hidden;
|
||||
};
|
||||
|
||||
let this = cx.view().downgrade();
|
||||
self.subscription = Some(item.subscribe_to_item_events(
|
||||
cx,
|
||||
Box::new(move |event, cx| {
|
||||
if let ItemEvent::UpdateBreadcrumbs = event {
|
||||
this.update(cx, |this, cx| {
|
||||
cx.notify();
|
||||
if let Some(active_item) = this.active_item.as_ref() {
|
||||
cx.emit(ToolbarItemEvent::ChangeLocation(
|
||||
active_item.breadcrumb_location(cx),
|
||||
))
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}),
|
||||
));
|
||||
self.active_item = Some(item.boxed_clone());
|
||||
item.breadcrumb_location(cx)
|
||||
}
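
The breadcrumbs hunk replaces the nested `if let ... else` with a `let ... else` early return, keeping the long subscription body at a single indentation level. A minimal illustration of the same pattern outside of gpui (types here are toy stand-ins):

```rust
#[derive(Debug, PartialEq)]
enum Location {
    Hidden,
    Visible,
}

// The `let ... else` form used above: bail out early when there is no
// active item instead of wrapping the whole body in `if let`.
fn location_for(active_item: Option<&str>) -> Location {
    let Some(_item) = active_item else {
        return Location::Hidden;
    };
    // ... subscribe to the item's events here ...
    Location::Visible
}

fn main() {
    assert_eq!(location_for(None), Location::Hidden);
    assert_eq!(location_for(Some("editor")), Location::Visible);
}
```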
|
||||
fn pane_focus_update(&mut self, pane_focused: bool, _: &mut ViewContext<Self>) {
|
||||
|
||||
@@ -51,4 +51,4 @@ language = { workspace = true, features = ["test-support"] }
|
||||
live_kit_client = { workspace = true, features = ["test-support"] }
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
util = { workspace = true, features = ["test-support"] }
|
||||
http = { workspace = true, features = ["test-support"] }
|
||||
http_client = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -493,7 +493,7 @@ impl Room {
|
||||
// we leave the room and return an error.
|
||||
if let Some(this) = this.upgrade() {
|
||||
log::info!("reconnection failed, leaving room");
|
||||
let _ = this.update(&mut cx, |this, cx| this.leave(cx))?;
|
||||
let _ = this.update(&mut cx, |this, cx| this.leave(cx))?.await?;
|
||||
}
|
||||
Err(anyhow!(
|
||||
"can't reconnect to room: client failed to re-establish connection"
|
||||
@@ -526,7 +526,7 @@ impl Room {
|
||||
rejoined_projects.push(proto::RejoinProject {
|
||||
id: project_id,
|
||||
worktrees: project
|
||||
.worktrees()
|
||||
.worktrees(cx)
|
||||
.map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
proto::RejoinWorktree {
|
||||
@@ -942,7 +942,7 @@ impl Room {
|
||||
this.pending_room_update.take();
|
||||
if this.should_leave() {
|
||||
log::info!("room is empty, leaving");
|
||||
let _ = this.leave(cx);
|
||||
let _ = this.leave(cx).detach();
|
||||
}
|
||||
|
||||
this.user_store.update(cx, |user_store, cx| {
|
||||
|
||||
@@ -40,4 +40,4 @@ rpc = { workspace = true, features = ["test-support"] }
|
||||
client = { workspace = true, features = ["test-support"] }
|
||||
settings = { workspace = true, features = ["test-support"] }
|
||||
util = { workspace = true, features = ["test-support"] }
|
||||
http = { workspace = true, features = ["test-support"] }
|
||||
http_client = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -4,7 +4,7 @@ use super::*;
|
||||
use client::{test::FakeServer, Client, UserStore};
|
||||
use clock::FakeSystemClock;
|
||||
use gpui::{AppContext, Context, Model, SemanticVersion, TestAppContext};
|
||||
use http::FakeHttpClient;
|
||||
use http_client::FakeHttpClient;
|
||||
use rpc::proto::{self};
|
||||
use settings::SettingsStore;
|
||||
|
||||
|
||||
@@ -129,6 +129,7 @@ fn main() -> Result<()> {
|
||||
|| path.starts_with("http://")
|
||||
|| path.starts_with("https://")
|
||||
|| path.starts_with("file://")
|
||||
|| path.starts_with("ssh://")
|
||||
{
|
||||
urls.push(path.to_string());
|
||||
} else {
|
||||
|
||||
@@ -18,7 +18,7 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
async-recursion = "0.3"
|
||||
async-tungstenite = { version = "0.16", features = ["async-std", "async-native-tls"] }
|
||||
async-tungstenite = { workspace = true, features = ["async-std", "async-native-tls"] }
|
||||
chrono = { workspace = true, features = ["serde"] }
|
||||
clock.workspace = true
|
||||
collections.workspace = true
|
||||
@@ -26,7 +26,7 @@ feature_flags.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
http.workspace = true
|
||||
http_client.workspace = true
|
||||
lazy_static.workspace = true
|
||||
log.workspace = true
|
||||
once_cell.workspace = true
|
||||
@@ -60,12 +60,11 @@ gpui = { workspace = true, features = ["test-support"] }
|
||||
rpc = { workspace = true, features = ["test-support"] }
|
||||
settings = { workspace = true, features = ["test-support"] }
|
||||
util = { workspace = true, features = ["test-support"] }
|
||||
http = { workspace = true, features = ["test-support"] }
|
||||
http_client = { workspace = true, features = ["test-support"] }
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
windows.workspace = true
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
cocoa.workspace = true
|
||||
isahc = { workspace = true, features = ["static-curl"] }
|
||||
async-native-tls = { version = "0.5.0", features = ["vendored"] }
|
||||
|
||||
@@ -7,8 +7,9 @@ pub mod user;
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use async_recursion::async_recursion;
|
||||
use async_tungstenite::tungstenite::{
|
||||
client::IntoClientRequest,
|
||||
error::Error as WebsocketError,
|
||||
http::{Request, StatusCode},
|
||||
http::{HeaderValue, Request, StatusCode},
|
||||
};
|
||||
use clock::SystemClock;
|
||||
use collections::HashMap;
|
||||
@@ -20,7 +21,7 @@ use futures::{
|
||||
use gpui::{
|
||||
actions, AnyModel, AnyWeakModel, AppContext, AsyncAppContext, Global, Model, Task, WeakModel,
|
||||
};
|
||||
use http::{HttpClient, HttpClientWithUrl};
|
||||
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use lazy_static::lazy_static;
|
||||
use parking_lot::RwLock;
|
||||
use postage::watch;
|
||||
@@ -233,7 +234,9 @@ pub enum EstablishConnectionError {
|
||||
#[error("{0}")]
|
||||
Other(#[from] anyhow::Error),
|
||||
#[error("{0}")]
|
||||
Http(#[from] http::Error),
|
||||
Http(#[from] http_client::Error),
|
||||
#[error("{0}")]
|
||||
InvalidHeaderValue(#[from] async_tungstenite::tungstenite::http::header::InvalidHeaderValue),
|
||||
#[error("{0}")]
|
||||
Io(#[from] std::io::Error),
|
||||
#[error("{0}")]
|
||||
@@ -1159,19 +1162,24 @@ impl Client {
|
||||
.ok()
|
||||
.unwrap_or_default();
|
||||
|
||||
let request = Request::builder()
|
||||
.header("Authorization", credentials.authorization_header())
|
||||
.header("x-zed-protocol-version", rpc::PROTOCOL_VERSION)
|
||||
.header("x-zed-app-version", app_version)
|
||||
.header(
|
||||
"x-zed-release-channel",
|
||||
release_channel.map(|r| r.dev_name()).unwrap_or("unknown"),
|
||||
);
|
||||
|
||||
let http = self.http.clone();
|
||||
let credentials = credentials.clone();
|
||||
let rpc_url = self.rpc_url(http, release_channel);
|
||||
cx.background_executor().spawn(async move {
|
||||
use HttpOrHttps::*;
|
||||
|
||||
#[derive(Debug)]
|
||||
enum HttpOrHttps {
|
||||
Http,
|
||||
Https,
|
||||
}
|
||||
|
||||
let mut rpc_url = rpc_url.await?;
|
||||
let url_scheme = match rpc_url.scheme() {
|
||||
"https" => Https,
|
||||
"http" => Http,
|
||||
_ => Err(anyhow!("invalid rpc url: {}", rpc_url))?,
|
||||
};
|
||||
let rpc_host = rpc_url
|
||||
.host_str()
|
||||
.zip(rpc_url.port_or_known_default())
|
||||
@@ -1180,10 +1188,37 @@ impl Client {
|
||||
|
||||
log::info!("connected to rpc endpoint {}", rpc_url);
|
||||
|
||||
match rpc_url.scheme() {
|
||||
"https" => {
|
||||
rpc_url.set_scheme("wss").unwrap();
|
||||
let request = request.uri(rpc_url.as_str()).body(())?;
|
||||
rpc_url
|
||||
.set_scheme(match url_scheme {
|
||||
Https => "wss",
|
||||
Http => "ws",
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
// We call `into_client_request` to let `tungstenite` construct the WebSocket request
|
||||
// for us from the RPC URL.
|
||||
//
|
||||
// Among other things, it will generate and set a `Sec-WebSocket-Key` header for us.
|
||||
let mut request = rpc_url.into_client_request()?;
|
||||
|
||||
// We then modify the request to add our desired headers.
|
||||
let request_headers = request.headers_mut();
|
||||
request_headers.insert(
|
||||
"Authorization",
|
||||
HeaderValue::from_str(&credentials.authorization_header())?,
|
||||
);
|
||||
request_headers.insert(
|
||||
"x-zed-protocol-version",
|
||||
HeaderValue::from_str(&rpc::PROTOCOL_VERSION.to_string())?,
|
||||
);
|
||||
request_headers.insert("x-zed-app-version", HeaderValue::from_str(&app_version)?);
|
||||
request_headers.insert(
|
||||
"x-zed-release-channel",
|
||||
HeaderValue::from_str(&release_channel.map(|r| r.dev_name()).unwrap_or("unknown"))?,
|
||||
);
|
||||
|
||||
match url_scheme {
|
||||
Https => {
|
||||
let (stream, _) =
|
||||
async_tungstenite::async_std::client_async_tls(request, stream).await?;
|
||||
Ok(Connection::new(
|
||||
@@ -1192,9 +1227,7 @@ impl Client {
|
||||
.sink_map_err(|error| anyhow!(error)),
|
||||
))
|
||||
}
|
||||
"http" => {
|
||||
rpc_url.set_scheme("ws").unwrap();
|
||||
let request = request.uri(rpc_url.as_str()).body(())?;
|
||||
Http => {
|
||||
let (stream, _) = async_tungstenite::client_async(request, stream).await?;
|
||||
Ok(Connection::new(
|
||||
stream
|
||||
@@ -1202,7 +1235,6 @@ impl Client {
|
||||
.sink_map_err(|error| anyhow!(error)),
|
||||
))
|
||||
}
|
||||
_ => Err(anyhow!("invalid rpc url: {}", rpc_url))?,
|
||||
}
|
||||
})
|
||||
}
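
The rewritten connection path lets `tungstenite` build the WebSocket upgrade request from the URL via `IntoClientRequest` (which also generates the `Sec-WebSocket-Key` header), then layers the Zed-specific headers on top through `headers_mut`. A sketch of that request construction in isolation, assuming the `async_tungstenite::tungstenite` re-export shown in the imports above; the URL and header values are placeholders:

```rust
use async_tungstenite::tungstenite::{client::IntoClientRequest, http::HeaderValue};

fn main() -> anyhow::Result<()> {
    // Let tungstenite construct the upgrade request (including the
    // `Sec-WebSocket-Key` header) from the RPC URL...
    let mut request = "wss://example.invalid/rpc".into_client_request()?;

    // ...then add the custom headers the server expects. Header names match
    // the code above; the values here are placeholders.
    let headers = request.headers_mut();
    headers.insert("authorization", HeaderValue::from_static("token <access-token>"));
    headers.insert("x-zed-protocol-version", HeaderValue::from_static("68"));
    headers.insert("x-zed-app-version", HeaderValue::from_static("0.142.0"));
    headers.insert("x-zed-release-channel", HeaderValue::from_static("stable"));

    println!("{:#?}", request.headers());
    Ok(())
}
```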
@@ -1351,7 +1383,7 @@ impl Client {
|
||||
let mut url = self.rpc_url(http.clone(), None).await?;
|
||||
url.set_path("/user");
|
||||
url.set_query(Some(&format!("github_login={login}")));
|
||||
let request = Request::get(url.as_str())
|
||||
let request: http_client::Request<AsyncBody> = Request::get(url.as_str())
|
||||
.header("Authorization", format!("token {api_token}"))
|
||||
.body("".into())?;
|
||||
|
||||
@@ -1410,7 +1442,7 @@ impl Client {
|
||||
self.peer.send(self.connection_id()?, message)
|
||||
}
|
||||
|
||||
fn send_dynamic(&self, envelope: proto::Envelope) -> Result<()> {
|
||||
pub fn send_dynamic(&self, envelope: proto::Envelope) -> Result<()> {
|
||||
let connection_id = self.connection_id()?;
|
||||
self.peer.send_dynamic(connection_id, envelope)
|
||||
}
|
||||
@@ -1783,7 +1815,7 @@ mod tests {
|
||||
|
||||
use clock::FakeSystemClock;
|
||||
use gpui::{BackgroundExecutor, Context, TestAppContext};
|
||||
use http::FakeHttpClient;
|
||||
use http_client::FakeHttpClient;
|
||||
use parking_lot::Mutex;
|
||||
use proto::TypedEnvelope;
|
||||
use settings::SettingsStore;
|
||||
|
||||
@@ -6,7 +6,7 @@ use clock::SystemClock;
|
||||
use collections::{HashMap, HashSet};
|
||||
use futures::Future;
|
||||
use gpui::{AppContext, BackgroundExecutor, Task};
|
||||
use http::{self, HttpClient, HttpClientWithUrl, Method};
|
||||
use http_client::{self, HttpClient, HttpClientWithUrl, Method};
|
||||
use once_cell::sync::Lazy;
|
||||
use parking_lot::Mutex;
|
||||
use release_channel::ReleaseChannel;
|
||||
@@ -18,7 +18,7 @@ use sysinfo::{CpuRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System};
|
||||
use telemetry_events::{
|
||||
ActionEvent, AppEvent, AssistantEvent, AssistantKind, CallEvent, CpuEvent, EditEvent,
|
||||
EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent,
|
||||
MemoryEvent, SettingEvent,
|
||||
MemoryEvent, ReplEvent, SettingEvent,
|
||||
};
|
||||
use tempfile::NamedTempFile;
|
||||
#[cfg(not(debug_assertions))]
|
||||
@@ -531,6 +531,21 @@ impl Telemetry {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn report_repl_event(
|
||||
self: &Arc<Self>,
|
||||
kernel_language: String,
|
||||
kernel_status: String,
|
||||
repl_session_id: String,
|
||||
) {
|
||||
let event = Event::Repl(ReplEvent {
|
||||
kernel_language,
|
||||
kernel_status,
|
||||
repl_session_id,
|
||||
});
|
||||
|
||||
self.report_event(event)
|
||||
}
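
`report_repl_event` just wraps the three strings in an `Event::Repl` and hands it to the shared `report_event` path. A small sketch of the payload itself, using a local mirror of `telemetry_events::ReplEvent` (field names taken from the code above; the values are illustrative, and the uploaded row also carries the usual app/session metadata, as the `ReplEventRow` further down shows):

```rust
use serde::Serialize;

// Local mirror of `telemetry_events::ReplEvent`.
#[derive(Debug, Serialize)]
struct ReplEvent {
    kernel_language: String,
    kernel_status: String,
    repl_session_id: String,
}

fn main() -> serde_json::Result<()> {
    let event = ReplEvent {
        kernel_language: "python".into(),       // illustrative values
        kernel_status: "idle".into(),
        repl_session_id: "repl-session-1".into(),
    };
    println!("{}", serde_json::to_string_pretty(&event)?);
    Ok(())
}
```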
|
||||
fn report_event(self: &Arc<Self>, event: Event) {
|
||||
let mut state = self.state.lock();
|
||||
|
||||
@@ -632,7 +647,7 @@ impl Telemetry {
|
||||
|
||||
let checksum = calculate_json_checksum(&json_bytes).unwrap_or("".to_string());
|
||||
|
||||
let request = http::Request::builder()
|
||||
let request = http_client::Request::builder()
|
||||
.method(Method::POST)
|
||||
.uri(
|
||||
this.http_client
|
||||
@@ -661,7 +676,7 @@ mod tests {
|
||||
use chrono::TimeZone;
|
||||
use clock::FakeSystemClock;
|
||||
use gpui::TestAppContext;
|
||||
use http::FakeHttpClient;
|
||||
use http_client::FakeHttpClient;
|
||||
|
||||
#[gpui::test]
|
||||
fn test_telemetry_flush_on_max_queue_size(cx: &mut TestAppContext) {
|
||||
|
||||
@@ -20,7 +20,7 @@ test-support = ["sqlite"]
|
||||
[dependencies]
|
||||
anthropic.workspace = true
|
||||
anyhow.workspace = true
|
||||
async-tungstenite = "0.16"
|
||||
async-tungstenite.workspace = true
|
||||
aws-config = { version = "1.1.5" }
|
||||
aws-sdk-s3 = { version = "1.15.0" }
|
||||
axum = { version = "0.6", features = ["json", "headers", "ws"] }
|
||||
@@ -30,12 +30,12 @@ chrono.workspace = true
|
||||
clock.workspace = true
|
||||
clickhouse.workspace = true
|
||||
collections.workspace = true
|
||||
dashmap = "5.4"
|
||||
dashmap.workspace = true
|
||||
envy = "0.4.2"
|
||||
futures.workspace = true
|
||||
google_ai.workspace = true
|
||||
hex.workspace = true
|
||||
http.workspace = true
|
||||
http_client.workspace = true
|
||||
live_kit_server.workspace = true
|
||||
log.workspace = true
|
||||
nanoid.workspace = true
|
||||
@@ -47,7 +47,7 @@ prost.workspace = true
|
||||
rand.workspace = true
|
||||
reqwest = { version = "0.11", features = ["json"] }
|
||||
rpc.workspace = true
|
||||
scrypt = "0.7"
|
||||
scrypt = "0.11"
|
||||
sea-orm = { version = "0.12.x", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
|
||||
semantic_version.workspace = true
|
||||
semver.workspace = true
|
||||
@@ -79,6 +79,7 @@ channel.workspace = true
|
||||
client = { workspace = true, features = ["test-support"] }
|
||||
collab_ui = { workspace = true, features = ["test-support"] }
|
||||
collections = { workspace = true, features = ["test-support"] }
|
||||
completion = { workspace = true, features = ["test-support"] }
|
||||
ctor.workspace = true
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
env_logger.workspace = true
|
||||
@@ -89,6 +90,7 @@ git_hosting_providers.workspace = true
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
indoc.workspace = true
|
||||
language = { workspace = true, features = ["test-support"] }
|
||||
language_model = { workspace = true, features = ["test-support"] }
|
||||
live_kit_client = { workspace = true, features = ["test-support"] }
|
||||
lsp = { workspace = true, features = ["test-support"] }
|
||||
menu.workspace = true
|
||||
@@ -99,10 +101,13 @@ pretty_assertions.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
recent_projects = { workspace = true }
|
||||
release_channel.workspace = true
|
||||
remote = { workspace = true, features = ["test-support"] }
|
||||
remote_server.workspace = true
|
||||
dev_server_projects.workspace = true
|
||||
rpc = { workspace = true, features = ["test-support"] }
|
||||
sea-orm = { version = "0.12.x", features = ["sqlx-sqlite"] }
|
||||
serde_json.workspace = true
|
||||
session = { workspace = true, features = ["test-support"] }
|
||||
settings = { workspace = true, features = ["test-support"] }
|
||||
sqlx = { version = "0.7", features = ["sqlite"] }
|
||||
theme.workspace = true
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use anyhow::{anyhow, Result};
|
||||
use rpc::proto;
|
||||
use util::ResultExt as _;
|
||||
|
||||
pub fn language_model_request_to_open_ai(
|
||||
request: proto::CompleteWithLanguageModel,
|
||||
@@ -21,25 +20,7 @@ pub fn language_model_request_to_open_ai(
|
||||
proto::LanguageModelRole::LanguageModelAssistant => {
|
||||
open_ai::RequestMessage::Assistant {
|
||||
content: Some(message.content),
|
||||
tool_calls: message
|
||||
.tool_calls
|
||||
.into_iter()
|
||||
.filter_map(|call| {
|
||||
Some(open_ai::ToolCall {
|
||||
id: call.id,
|
||||
content: match call.variant? {
|
||||
proto::tool_call::Variant::Function(f) => {
|
||||
open_ai::ToolCallContent::Function {
|
||||
function: open_ai::FunctionContent {
|
||||
name: f.name,
|
||||
arguments: f.arguments,
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
})
|
||||
})
|
||||
.collect(),
|
||||
tool_calls: Vec::new(),
|
||||
}
|
||||
}
|
||||
proto::LanguageModelRole::LanguageModelSystem => {
|
||||
@@ -47,12 +28,6 @@ pub fn language_model_request_to_open_ai(
|
||||
content: message.content,
|
||||
}
|
||||
}
|
||||
proto::LanguageModelRole::LanguageModelTool => open_ai::RequestMessage::Tool {
|
||||
tool_call_id: message
|
||||
.tool_call_id
|
||||
.ok_or_else(|| anyhow!("tool message is missing tool call id"))?,
|
||||
content: message.content,
|
||||
},
|
||||
};
|
||||
|
||||
Ok(openai_message)
|
||||
@@ -61,32 +36,8 @@ pub fn language_model_request_to_open_ai(
|
||||
stream: true,
|
||||
stop: request.stop,
|
||||
temperature: request.temperature,
|
||||
tools: request
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|tool| {
|
||||
Some(match tool.variant? {
|
||||
proto::chat_completion_tool::Variant::Function(f) => {
|
||||
open_ai::ToolDefinition::Function {
|
||||
function: open_ai::FunctionDefinition {
|
||||
name: f.name,
|
||||
description: f.description,
|
||||
parameters: if let Some(params) = &f.parameters {
|
||||
Some(
|
||||
serde_json::from_str(params)
|
||||
.context("failed to deserialize tool parameters")
|
||||
.log_err()?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect(),
|
||||
tool_choice: request.tool_choice,
|
||||
tool_choice: None,
|
||||
tools: Vec::new(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -118,9 +69,6 @@ pub fn language_model_request_message_to_google_ai(
|
||||
proto::LanguageModelRole::LanguageModelUser => google_ai::Role::User,
|
||||
proto::LanguageModelRole::LanguageModelAssistant => google_ai::Role::Model,
|
||||
proto::LanguageModelRole::LanguageModelSystem => google_ai::Role::User,
|
||||
proto::LanguageModelRole::LanguageModelTool => {
|
||||
Err(anyhow!("we don't handle tool calls with google ai yet"))?
|
||||
}
|
||||
},
|
||||
})
|
||||
}
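
With the tool role removed, the proto-to-Google-AI role mapping collapses to three cases, with system messages sent under the user role. A tiny sketch of that mapping using local stand-ins for the two enums (the real types live in `proto` and `google_ai`):

```rust
// Local stand-ins for the proto and google_ai role enums, mirroring the
// mapping in the function above.
#[derive(Debug)]
enum ProtoRole {
    User,
    Assistant,
    System,
}

#[derive(Debug, PartialEq)]
enum GoogleRole {
    User,
    Model,
}

fn to_google_role(role: ProtoRole) -> GoogleRole {
    match role {
        ProtoRole::User | ProtoRole::System => GoogleRole::User,
        ProtoRole::Assistant => GoogleRole::Model,
    }
}

fn main() {
    assert_eq!(to_google_role(ProtoRole::System), GoogleRole::User);
    assert_eq!(to_google_role(ProtoRole::Assistant), GoogleRole::Model);
}
```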
|
||||
@@ -1,3 +1,4 @@
|
||||
pub mod contributors;
|
||||
pub mod events;
|
||||
pub mod extensions;
|
||||
pub mod ips_file;
|
||||
@@ -5,13 +6,13 @@ pub mod slack;
|
||||
|
||||
use crate::{
|
||||
auth,
|
||||
db::{ContributorSelector, User, UserId},
|
||||
db::{User, UserId},
|
||||
rpc, AppState, Error, Result,
|
||||
};
|
||||
use anyhow::anyhow;
|
||||
use axum::{
|
||||
body::Body,
|
||||
extract::{self, Path, Query},
|
||||
extract::{Path, Query},
|
||||
http::{self, Request, StatusCode},
|
||||
middleware::{self, Next},
|
||||
response::IntoResponse,
|
||||
@@ -19,7 +20,6 @@ use axum::{
|
||||
Extension, Json, Router,
|
||||
};
|
||||
use axum_extra::response::ErasedJson;
|
||||
use chrono::SecondsFormat;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::Arc;
|
||||
use tower::ServiceBuilder;
|
||||
@@ -31,8 +31,7 @@ pub fn routes(rpc_server: Option<Arc<rpc::Server>>, state: Arc<AppState>) -> Rou
|
||||
.route("/user", get(get_authenticated_user))
|
||||
.route("/users/:id/access_tokens", post(create_access_token))
|
||||
.route("/rpc_server_snapshot", get(get_rpc_server_snapshot))
|
||||
.route("/contributors", get(get_contributors).post(add_contributor))
|
||||
.route("/contributor", get(check_is_contributor))
|
||||
.merge(contributors::router())
|
||||
.layer(
|
||||
ServiceBuilder::new()
|
||||
.layer(Extension(state))
|
||||
@@ -126,66 +125,6 @@ async fn get_rpc_server_snapshot(
|
||||
Ok(ErasedJson::pretty(rpc_server.snapshot().await))
|
||||
}
|
||||
|
||||
async fn get_contributors(Extension(app): Extension<Arc<AppState>>) -> Result<Json<Vec<String>>> {
|
||||
Ok(Json(app.db.get_contributors().await?))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct CheckIsContributorParams {
|
||||
github_user_id: Option<i32>,
|
||||
github_login: Option<String>,
|
||||
}
|
||||
|
||||
impl CheckIsContributorParams {
|
||||
fn as_contributor_selector(self) -> Result<ContributorSelector> {
|
||||
if let Some(github_user_id) = self.github_user_id {
|
||||
return Ok(ContributorSelector::GitHubUserId { github_user_id });
|
||||
}
|
||||
|
||||
if let Some(github_login) = self.github_login {
|
||||
return Ok(ContributorSelector::GitHubLogin { github_login });
|
||||
}
|
||||
|
||||
Err(anyhow!(
|
||||
"must be one of `github_user_id` or `github_login`."
|
||||
))?
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct CheckIsContributorResponse {
|
||||
signed_at: Option<String>,
|
||||
}
|
||||
|
||||
async fn check_is_contributor(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
Query(params): Query<CheckIsContributorParams>,
|
||||
) -> Result<Json<CheckIsContributorResponse>> {
|
||||
let params = params.as_contributor_selector()?;
|
||||
Ok(Json(CheckIsContributorResponse {
|
||||
signed_at: app
|
||||
.db
|
||||
.get_contributor_sign_timestamp(¶ms)
|
||||
.await?
|
||||
.map(|ts| ts.and_utc().to_rfc3339_opts(SecondsFormat::Millis, true)),
|
||||
}))
|
||||
}
|
||||
|
||||
async fn add_contributor(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
extract::Json(params): extract::Json<AuthenticatedUserParams>,
|
||||
) -> Result<()> {
|
||||
let initial_channel_id = app.config.auto_join_channel_id;
|
||||
app.db
|
||||
.add_contributor(
|
||||
¶ms.github_login,
|
||||
params.github_user_id,
|
||||
params.github_email.as_deref(),
|
||||
initial_channel_id,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct CreateAccessTokenQueryParams {
|
||||
public_key: String,
crates/collab/src/api/contributors.rs (new file, 121 lines added)
@@ -0,0 +1,121 @@
|
||||
use std::sync::{Arc, OnceLock};
|
||||
|
||||
use anyhow::anyhow;
|
||||
use axum::{
|
||||
extract::{self, Query},
|
||||
routing::get,
|
||||
Extension, Json, Router,
|
||||
};
|
||||
use chrono::{NaiveDateTime, SecondsFormat};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::api::AuthenticatedUserParams;
|
||||
use crate::db::ContributorSelector;
|
||||
use crate::{AppState, Result};
|
||||
|
||||
pub fn router() -> Router {
|
||||
Router::new()
|
||||
.route("/contributors", get(get_contributors).post(add_contributor))
|
||||
.route("/contributor", get(check_is_contributor))
|
||||
}
|
||||
|
||||
async fn get_contributors(Extension(app): Extension<Arc<AppState>>) -> Result<Json<Vec<String>>> {
|
||||
Ok(Json(app.db.get_contributors().await?))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct CheckIsContributorParams {
|
||||
github_user_id: Option<i32>,
|
||||
github_login: Option<String>,
|
||||
}
|
||||
|
||||
impl CheckIsContributorParams {
|
||||
fn as_contributor_selector(self) -> Result<ContributorSelector> {
|
||||
if let Some(github_user_id) = self.github_user_id {
|
||||
return Ok(ContributorSelector::GitHubUserId { github_user_id });
|
||||
}
|
||||
|
||||
if let Some(github_login) = self.github_login {
|
||||
return Ok(ContributorSelector::GitHubLogin { github_login });
|
||||
}
|
||||
|
||||
Err(anyhow!(
|
||||
"must be one of `github_user_id` or `github_login`."
|
||||
))?
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct CheckIsContributorResponse {
|
||||
signed_at: Option<String>,
|
||||
}
|
||||
|
||||
async fn check_is_contributor(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
Query(params): Query<CheckIsContributorParams>,
|
||||
) -> Result<Json<CheckIsContributorResponse>> {
|
||||
let params = params.as_contributor_selector()?;
|
||||
|
||||
if RenovateBot::is_renovate_bot(¶ms) {
|
||||
return Ok(Json(CheckIsContributorResponse {
|
||||
signed_at: Some(
|
||||
RenovateBot::created_at()
|
||||
.and_utc()
|
||||
.to_rfc3339_opts(SecondsFormat::Millis, true),
|
||||
),
|
||||
}));
|
||||
}
|
||||
|
||||
Ok(Json(CheckIsContributorResponse {
|
||||
signed_at: app
|
||||
.db
|
||||
.get_contributor_sign_timestamp(¶ms)
|
||||
.await?
|
||||
.map(|ts| ts.and_utc().to_rfc3339_opts(SecondsFormat::Millis, true)),
|
||||
}))
|
||||
}
|
||||
|
||||
/// The Renovate bot GitHub user (`renovate[bot]`).
|
||||
///
|
||||
/// https://api.github.com/users/renovate[bot]
|
||||
struct RenovateBot;
|
||||
|
||||
impl RenovateBot {
|
||||
const LOGIN: &'static str = "renovate[bot]";
|
||||
const USER_ID: i32 = 29139614;
|
||||
|
||||
/// Returns the `created_at` timestamp for the Renovate bot user.
|
||||
fn created_at() -> &'static NaiveDateTime {
|
||||
static CREATED_AT: OnceLock<NaiveDateTime> = OnceLock::new();
|
||||
CREATED_AT.get_or_init(|| {
|
||||
chrono::DateTime::parse_from_rfc3339("2017-06-02T07:04:12Z")
|
||||
.expect("failed to parse 'created_at' for 'renovate[bot]'")
|
||||
.naive_utc()
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns whether the given contributor selector corresponds to the Renovate bot user.
|
||||
fn is_renovate_bot(contributor: &ContributorSelector) -> bool {
|
||||
match contributor {
|
||||
ContributorSelector::GitHubLogin { github_login } => github_login == Self::LOGIN,
|
||||
ContributorSelector::GitHubUserId { github_user_id } => {
|
||||
github_user_id == &Self::USER_ID
|
||||
}
|
||||
}
|
||||
}
|
||||
}
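
The contributor check special-cases the Renovate bot by either GitHub login or numeric user id, presumably so its automated PRs pass the contributors check. A small sketch of the selector matching with a local stand-in for `db::ContributorSelector`; the login and user id are the constants from the code above:

```rust
// Local stand-in for `db::ContributorSelector`.
enum ContributorSelector {
    GitHubLogin { github_login: String },
    GitHubUserId { github_user_id: i32 },
}

const RENOVATE_LOGIN: &str = "renovate[bot]";
const RENOVATE_USER_ID: i32 = 29139614;

fn is_renovate_bot(selector: &ContributorSelector) -> bool {
    match selector {
        ContributorSelector::GitHubLogin { github_login } => github_login == RENOVATE_LOGIN,
        ContributorSelector::GitHubUserId { github_user_id } => {
            *github_user_id == RENOVATE_USER_ID
        }
    }
}

fn main() {
    assert!(is_renovate_bot(&ContributorSelector::GitHubLogin {
        github_login: "renovate[bot]".to_string(),
    }));
    assert!(!is_renovate_bot(&ContributorSelector::GitHubUserId {
        github_user_id: 12345,
    }));
}
```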
|
||||
async fn add_contributor(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
extract::Json(params): extract::Json<AuthenticatedUserParams>,
|
||||
) -> Result<()> {
|
||||
let initial_channel_id = app.config.auto_join_channel_id;
|
||||
app.db
|
||||
.add_contributor(
|
||||
¶ms.github_login,
|
||||
params.github_user_id,
|
||||
params.github_email.as_deref(),
|
||||
initial_channel_id,
|
||||
)
|
||||
.await
|
||||
}
|
||||
@@ -16,7 +16,7 @@ use sha2::{Digest, Sha256};
|
||||
use std::sync::{Arc, OnceLock};
|
||||
use telemetry_events::{
|
||||
ActionEvent, AppEvent, AssistantEvent, CallEvent, CpuEvent, EditEvent, EditorEvent, Event,
|
||||
EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent,
|
||||
EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, ReplEvent,
|
||||
SettingEvent,
|
||||
};
|
||||
use uuid::Uuid;
|
||||
@@ -518,6 +518,13 @@ pub async fn post_events(
|
||||
checksum_matched,
|
||||
))
|
||||
}
|
||||
Event::Repl(event) => to_upload.repl_events.push(ReplEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
checksum_matched,
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -542,6 +549,7 @@ struct ToUpload {
|
||||
extension_events: Vec<ExtensionEventRow>,
|
||||
edit_events: Vec<EditEventRow>,
|
||||
action_events: Vec<ActionEventRow>,
|
||||
repl_events: Vec<ReplEventRow>,
|
||||
}
|
||||
|
||||
impl ToUpload {
|
||||
@@ -617,6 +625,11 @@ impl ToUpload {
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table '{ACTION_EVENTS_TABLE}'"))?;
|
||||
|
||||
const REPL_EVENTS_TABLE: &str = "repl_events";
|
||||
Self::upload_to_table(REPL_EVENTS_TABLE, &self.repl_events, clickhouse_client)
|
||||
.await
|
||||
.with_context(|| format!("failed to upload to table '{REPL_EVENTS_TABLE}'"))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -625,22 +638,24 @@ impl ToUpload {
|
||||
rows: &[T],
|
||||
clickhouse_client: &clickhouse::Client,
|
||||
) -> anyhow::Result<()> {
|
||||
if !rows.is_empty() {
|
||||
let mut insert = clickhouse_client.insert(table)?;
|
||||
|
||||
for event in rows {
|
||||
insert.write(event).await?;
|
||||
}
|
||||
|
||||
insert.end().await?;
|
||||
|
||||
let event_count = rows.len();
|
||||
log::info!(
|
||||
"wrote {event_count} {event_specifier} to '{table}'",
|
||||
event_specifier = if event_count == 1 { "event" } else { "events" }
|
||||
);
|
||||
if rows.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut insert = clickhouse_client.insert(table)?;
|
||||
|
||||
for event in rows {
|
||||
insert.write(event).await?;
|
||||
}
|
||||
|
||||
insert.end().await?;
|
||||
|
||||
let event_count = rows.len();
|
||||
log::info!(
|
||||
"wrote {event_count} {event_specifier} to '{table}'",
|
||||
event_specifier = if event_count == 1 { "event" } else { "events" }
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -1189,6 +1204,62 @@ impl ExtensionEventRow {
}
}

#[derive(Serialize, Debug, clickhouse::Row)]
pub struct ReplEventRow {
// AppInfoBase
app_version: String,
major: Option<i32>,
minor: Option<i32>,
patch: Option<i32>,
checksum_matched: bool,
release_channel: String,
os_name: String,
os_version: String,

// ClientEventBase
installation_id: Option<String>,
session_id: Option<String>,
is_staff: Option<bool>,
time: i64,

// ReplEventRow
kernel_language: String,
kernel_status: String,
repl_session_id: String,
}

impl ReplEventRow {
fn from_event(
event: ReplEvent,
wrapper: &EventWrapper,
body: &EventRequestBody,
first_event_at: chrono::DateTime<chrono::Utc>,
checksum_matched: bool,
) -> Self {
let semver = body.semver();
let time =
first_event_at + chrono::Duration::milliseconds(wrapper.milliseconds_since_first_event);

Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
checksum_matched,
release_channel: body.release_channel.clone().unwrap_or_default(),
os_name: body.os_name.clone(),
os_version: body.os_version.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
is_staff: body.is_staff,
time: time.timestamp_millis(),
kernel_language: event.kernel_language,
kernel_status: event.kernel_status,
repl_session_id: event.repl_session_id,
}
}
}
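
// Editor's note: an illustrative sketch, not part of the diff above. It spells out
// how the row's `time` column is derived: the request body carries one absolute
// `first_event_at` timestamp, each wrapped event stores only a millisecond offset
// from it, and `from_event` converts that back into absolute epoch milliseconds.
fn event_time_millis(
    first_event_at: chrono::DateTime<chrono::Utc>,
    milliseconds_since_first_event: i64,
) -> i64 {
    (first_event_at + chrono::Duration::milliseconds(milliseconds_since_first_event))
        .timestamp_millis()
}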

#[derive(Serialize, Debug, clickhouse::Row)]
pub struct EditEventRow {
// AppInfoBase

@@ -9,6 +9,7 @@ use axum::{
middleware::Next,
response::IntoResponse,
};
use base64::prelude::*;
use prometheus::{exponential_buckets, register_histogram, Histogram};
pub use rpc::auth::random_token;
use scrypt::{
@@ -155,19 +156,27 @@ pub async fn create_access_token(
/// protection.
pub fn hash_access_token(token: &str) -> String {
let digest = sha2::Sha256::digest(token);
format!(
"$sha256${}",
base64::encode_config(digest, base64::URL_SAFE)
)
format!("$sha256${}", BASE64_URL_SAFE.encode(digest))
}
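
// Editor's note: an illustrative sketch, not part of the diff above. Both the old
// and new lines produce a "$sha256$" prefix followed by a URL-safe base64 digest;
// the change appears to be a migration to the `base64` prelude API rather than a
// format change. A minimal check of that shape (the test name is hypothetical):
#[test]
fn sha256_token_hash_shape() {
    let hashed = hash_access_token("example-token");
    assert!(hashed.starts_with("$sha256$"));
    // SHA-256 hashing is deterministic, so the same token always maps to the
    // same stored string.
    assert_eq!(hashed, hash_access_token("example-token"));
}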

/// Encrypts the given access token with the given public key to avoid leaking it on the way
/// to the client.
pub fn encrypt_access_token(access_token: &str, public_key: String) -> Result<String> {
use rpc::auth::EncryptionFormat;

/// The encryption format to use for the access token.
///
/// Currently we're using the original encryption format to avoid
/// breaking compatibility with older clients.
///
/// Once enough clients are capable of decrypting the newer encryption
/// format we can start encrypting with `EncryptionFormat::V1`.
const ENCRYPTION_FORMAT: EncryptionFormat = EncryptionFormat::V0;

let native_app_public_key =
rpc::auth::PublicKey::try_from(public_key).context("failed to parse app public key")?;
let encrypted_access_token = native_app_public_key
.encrypt_string(access_token)
.encrypt_string(access_token, ENCRYPTION_FORMAT)
.context("failed to encrypt access token with public key")?;
Ok(encrypted_access_token)
}
@@ -391,15 +400,16 @@ mod test {
fn previous_hash_access_token(token: &str) -> Result<String> {
// Avoid slow hashing in debug mode.
let params = if cfg!(debug_assertions) {
scrypt::Params::new(1, 1, 1).unwrap()
scrypt::Params::new(1, 1, 1, scrypt::Params::RECOMMENDED_LEN).unwrap()
} else {
scrypt::Params::new(14, 8, 1).unwrap()
scrypt::Params::new(14, 8, 1, scrypt::Params::RECOMMENDED_LEN).unwrap()
};

Ok(Scrypt
.hash_password(
.hash_password_customized(
token.as_bytes(),
None,
None,
params,
&SaltString::generate(thread_rng()),
)

@@ -153,7 +153,7 @@ async fn main() -> Result<()> {
let signal = async move {
// todo(windows):
// `ctrl_close` does not work well, because tokio's signal handler always returns soon,
// but system termiates the application soon after returning CTRL+CLOSE handler.
// but system terminates the application soon after returning CTRL+CLOSE handler.
// So we should implement blocking handler to treat CTRL+CLOSE signal.
let mut ctrl_break = tokio::signal::windows::ctrl_break()
.expect("failed to listen for interrupt signal");

@@ -12,7 +12,7 @@ use crate::{
executor::Executor,
AppState, Error, RateLimit, RateLimiter, Result,
};
use anyhow::{anyhow, Context as _};
use anyhow::{anyhow, bail, Context as _};
use async_tungstenite::tungstenite::{
protocol::CloseFrame as TungsteniteCloseFrame, Message as TungsteniteMessage,
};
@@ -42,7 +42,7 @@ use futures::{
stream::FuturesUnordered,
FutureExt, SinkExt, StreamExt, TryStreamExt,
};
use http::IsahcHttpClient;
use http_client::IsahcHttpClient;
use prometheus::{register_int_gauge, IntGauge};
use rpc::{
proto::{
@@ -1392,7 +1392,7 @@ pub async fn handle_websocket_request(
let socket = socket
.map_ok(to_tungstenite_message)
.err_into()
.with(|message| async move { Ok(to_axum_message(message)) });
.with(|message| async move { to_axum_message(message) });
let connection = Connection::new(Box::pin(socket));
async move {
server
@@ -4514,7 +4514,7 @@ impl RateLimit for CompleteWithLanguageModelRateLimit {
}

async fn complete_with_language_model(
request: proto::CompleteWithLanguageModel,
mut request: proto::CompleteWithLanguageModel,
response: StreamingResponse<proto::CompleteWithLanguageModel>,
session: Session,
open_ai_api_key: Option<Arc<str>>,
@@ -4530,18 +4530,43 @@ async fn complete_with_language_model(
.check::<CompleteWithLanguageModelRateLimit>(session.user_id())
.await?;

if request.model.starts_with("gpt") {
let api_key =
open_ai_api_key.ok_or_else(|| anyhow!("no OpenAI API key configured on the server"))?;
complete_with_open_ai(request, response, session, api_key).await?;
} else if request.model.starts_with("gemini") {
let api_key = google_ai_api_key
.ok_or_else(|| anyhow!("no Google AI API key configured on the server"))?;
complete_with_google_ai(request, response, session, api_key).await?;
} else if request.model.starts_with("claude") {
let api_key = anthropic_api_key
.ok_or_else(|| anyhow!("no Anthropic AI API key configured on the server"))?;
complete_with_anthropic(request, response, session, api_key).await?;
let mut provider_and_model = request.model.split('/');
let (provider, model) = match (
provider_and_model.next().unwrap(),
provider_and_model.next(),
) {
(provider, Some(model)) => (provider, model),
(model, None) => {
if model.starts_with("gpt") {
("openai", model)
} else if model.starts_with("gemini") {
("google", model)
} else if model.starts_with("claude") {
("anthropic", model)
} else {
("unknown", model)
}
}
};
let provider = provider.to_string();
request.model = model.to_string();

match provider.as_str() {
"openai" => {
let api_key = open_ai_api_key.context("no OpenAI API key configured on the server")?;
complete_with_open_ai(request, response, session, api_key).await?;
}
"anthropic" => {
let api_key =
anthropic_api_key.context("no Anthropic AI API key configured on the server")?;
complete_with_anthropic(request, response, session, api_key).await?;
}
"google" => {
let api_key =
google_ai_api_key.context("no Google AI API key configured on the server")?;
complete_with_google_ai(request, response, session, api_key).await?;
}
provider => return Err(anyhow!("unknown provider {:?}", provider))?,
}

Ok(())
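
// Editor's note: an illustrative sketch, not part of the diff above. The new
// dispatch accepts either a fully qualified "provider/model" id or a bare model
// name, inferring the provider from well-known prefixes. A slightly simplified
// standalone version of that parsing rule (the helper name is hypothetical):
fn split_provider_and_model(id: &str) -> (&str, &str) {
    match id.split_once('/') {
        Some((provider, model)) => (provider, model),
        None if id.starts_with("gpt") => ("openai", id),
        None if id.starts_with("gemini") => ("google", id),
        None if id.starts_with("claude") => ("anthropic", id),
        None => ("unknown", id),
    }
}
// e.g. split_provider_and_model("anthropic/claude-3-opus") == ("anthropic", "claude-3-opus")
// and  split_provider_and_model("gpt-4") == ("openai", "gpt-4")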
@@ -4572,37 +4597,19 @@ async fn complete_with_open_ai(
|
||||
.map(|choice| proto::LanguageModelChoiceDelta {
|
||||
index: choice.index,
|
||||
delta: Some(proto::LanguageModelResponseMessage {
|
||||
role: choice.delta.role.map(|role| match role {
|
||||
open_ai::Role::User => LanguageModelRole::LanguageModelUser,
|
||||
open_ai::Role::Assistant => LanguageModelRole::LanguageModelAssistant,
|
||||
open_ai::Role::System => LanguageModelRole::LanguageModelSystem,
|
||||
open_ai::Role::Tool => LanguageModelRole::LanguageModelTool,
|
||||
} as i32),
|
||||
role: choice.delta.role.and_then(|role| match role {
|
||||
open_ai::Role::User => {
|
||||
Some(LanguageModelRole::LanguageModelUser as i32)
|
||||
}
|
||||
open_ai::Role::Assistant => {
|
||||
Some(LanguageModelRole::LanguageModelAssistant as i32)
|
||||
}
|
||||
open_ai::Role::System => {
|
||||
Some(LanguageModelRole::LanguageModelSystem as i32)
|
||||
}
|
||||
_ => None,
|
||||
}),
|
||||
content: choice.delta.content,
|
||||
tool_calls: choice
|
||||
.delta
|
||||
.tool_calls
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.map(|delta| proto::ToolCallDelta {
|
||||
index: delta.index as u32,
|
||||
id: delta.id,
|
||||
variant: match delta.function {
|
||||
Some(function) => {
|
||||
let name = function.name;
|
||||
let arguments = function.arguments;
|
||||
|
||||
Some(proto::tool_call_delta::Variant::Function(
|
||||
proto::tool_call_delta::FunctionCallDelta {
|
||||
name,
|
||||
arguments,
|
||||
},
|
||||
))
|
||||
}
|
||||
None => None,
|
||||
},
|
||||
})
|
||||
.collect(),
|
||||
}),
|
||||
finish_reason: choice.finish_reason,
|
||||
})
|
||||
@@ -4654,8 +4661,6 @@ async fn complete_with_google_ai(
|
||||
})
|
||||
.collect(),
|
||||
),
|
||||
// Tool calls are not supported for Google
|
||||
tool_calls: Vec::new(),
|
||||
}),
|
||||
finish_reason: candidate.finish_reason.map(|reason| reason.to_string()),
|
||||
})
|
||||
@@ -4672,8 +4677,6 @@ async fn complete_with_anthropic(
|
||||
session: UserSession,
|
||||
api_key: Arc<str>,
|
||||
) -> Result<()> {
|
||||
let model = anthropic::Model::from_id(&request.model)?;
|
||||
|
||||
let mut system_message = String::new();
|
||||
let messages = request
|
||||
.messages
|
||||
@@ -4698,8 +4701,6 @@ async fn complete_with_anthropic(
|
||||
|
||||
None
|
||||
}
|
||||
// We don't yet support tool calls for Anthropic
|
||||
LanguageModelRole::LanguageModelTool => None,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
@@ -4709,7 +4710,7 @@ async fn complete_with_anthropic(
|
||||
anthropic::ANTHROPIC_API_URL,
|
||||
&api_key,
|
||||
anthropic::Request {
|
||||
model,
|
||||
model: request.model,
|
||||
messages,
|
||||
stream: true,
|
||||
system: system_message,
|
||||
@@ -4744,7 +4745,6 @@ async fn complete_with_anthropic(
|
||||
delta: Some(proto::LanguageModelResponseMessage {
|
||||
role: Some(current_role as i32),
|
||||
content: Some(text),
|
||||
tool_calls: Vec::new(),
|
||||
}),
|
||||
finish_reason: None,
|
||||
}],
|
||||
@@ -4761,7 +4761,6 @@ async fn complete_with_anthropic(
|
||||
delta: Some(proto::LanguageModelResponseMessage {
|
||||
role: Some(current_role as i32),
|
||||
content: Some(text),
|
||||
tool_calls: Vec::new(),
|
||||
}),
|
||||
finish_reason: None,
|
||||
}],
|
||||
@@ -5129,8 +5128,8 @@ async fn get_private_user_info(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn to_axum_message(message: TungsteniteMessage) -> AxumMessage {
|
||||
match message {
|
||||
fn to_axum_message(message: TungsteniteMessage) -> anyhow::Result<AxumMessage> {
|
||||
let message = match message {
|
||||
TungsteniteMessage::Text(payload) => AxumMessage::Text(payload),
|
||||
TungsteniteMessage::Binary(payload) => AxumMessage::Binary(payload),
|
||||
TungsteniteMessage::Ping(payload) => AxumMessage::Ping(payload),
|
||||
@@ -5139,7 +5138,20 @@ fn to_axum_message(message: TungsteniteMessage) -> AxumMessage {
|
||||
code: frame.code.into(),
|
||||
reason: frame.reason,
|
||||
})),
|
||||
}
|
||||
// We should never receive a frame while reading the message, according
|
||||
// to the `tungstenite` maintainers:
|
||||
//
|
||||
// > It cannot occur when you read messages from the WebSocket, but it
|
||||
// > can be used when you want to send the raw frames (e.g. you want to
|
||||
// > send the frames to the WebSocket without composing the full message first).
|
||||
// >
|
||||
// > — https://github.com/snapview/tungstenite-rs/issues/268
|
||||
TungsteniteMessage::Frame(_) => {
|
||||
bail!("received an unexpected frame while reading the message")
|
||||
}
|
||||
};
|
||||
|
||||
Ok(message)
|
||||
}
|
||||
|
||||
fn to_tungstenite_message(message: AxumMessage) -> TungsteniteMessage {
|
||||
|
||||
@@ -16,6 +16,7 @@ mod notification_tests;
|
||||
mod random_channel_buffer_tests;
|
||||
mod random_project_collaboration_tests;
|
||||
mod randomized_test_helpers;
|
||||
mod remote_editing_collaboration_tests;
|
||||
mod test_server;
|
||||
|
||||
use language::{tree_sitter_rust, Language, LanguageConfig, LanguageMatcher};
|
||||
|
||||
@@ -52,7 +52,7 @@ async fn test_channel_guests(
|
||||
assert!(project_b.read_with(cx_b, |project, _| project.is_read_only()));
|
||||
assert!(project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
let worktree_id = project.worktrees().next().unwrap().read(cx).id();
|
||||
let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
|
||||
project.create_entry((worktree_id, "b.txt"), false, cx)
|
||||
})
|
||||
.await
|
||||
|
||||
@@ -76,7 +76,7 @@ async fn test_host_disconnect(
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
|
||||
let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
|
||||
let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -1144,7 +1144,7 @@ async fn test_share_project(
|
||||
});
|
||||
|
||||
project_b.read_with(cx_b, |project, cx| {
|
||||
let worktree = project.worktrees().next().unwrap().read(cx);
|
||||
let worktree = project.worktrees(cx).next().unwrap().read(cx);
|
||||
assert_eq!(
|
||||
worktree.paths().map(AsRef::as_ref).collect::<Vec<_>>(),
|
||||
[
|
||||
@@ -1158,7 +1158,7 @@ async fn test_share_project(
|
||||
|
||||
project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
let worktree = project.worktrees().next().unwrap();
|
||||
let worktree = project.worktrees(cx).next().unwrap();
|
||||
let entry = worktree.read(cx).entry_for_path("ignored-dir").unwrap();
|
||||
project.expand_entry(worktree_id, entry.id, cx).unwrap()
|
||||
})
|
||||
@@ -1166,7 +1166,7 @@ async fn test_share_project(
|
||||
.unwrap();
|
||||
|
||||
project_b.read_with(cx_b, |project, cx| {
|
||||
let worktree = project.worktrees().next().unwrap().read(cx);
|
||||
let worktree = project.worktrees(cx).next().unwrap().read(cx);
|
||||
assert_eq!(
|
||||
worktree.paths().map(AsRef::as_ref).collect::<Vec<_>>(),
|
||||
[
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
#![allow(clippy::reversed_empty_ranges)]
|
||||
use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
|
||||
use call::{ActiveCall, ParticipantLocation};
|
||||
use client::ChannelId;
|
||||
@@ -266,7 +267,7 @@ async fn test_basic_following(
|
||||
|
||||
// When client A activates a different editor, client B does so as well.
|
||||
workspace_a.update(cx_a, |workspace, cx| {
|
||||
workspace.activate_item(&editor_a1, cx)
|
||||
workspace.activate_item(&editor_a1, true, true, cx)
|
||||
});
|
||||
executor.run_until_parked();
|
||||
workspace_b.update(cx_b, |workspace, cx| {
|
||||
@@ -311,7 +312,7 @@ async fn test_basic_following(
|
||||
let editor = cx.new_view(|cx| {
|
||||
Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), true, cx)
|
||||
});
|
||||
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, cx);
|
||||
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, cx);
|
||||
editor
|
||||
});
|
||||
executor.run_until_parked();
|
||||
@@ -401,7 +402,7 @@ async fn test_basic_following(
|
||||
workspace.unfollow(peer_id_a, cx).unwrap()
|
||||
});
|
||||
workspace_a.update(cx_a, |workspace, cx| {
|
||||
workspace.activate_item(&editor_a2, cx)
|
||||
workspace.activate_item(&editor_a2, true, true, cx)
|
||||
});
|
||||
executor.run_until_parked();
|
||||
assert_eq!(
|
||||
@@ -466,7 +467,7 @@ async fn test_basic_following(
|
||||
|
||||
// Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer.
|
||||
workspace_b.update(cx_b, |workspace, cx| {
|
||||
workspace.activate_item(&multibuffer_editor_b, cx)
|
||||
workspace.activate_item(&multibuffer_editor_b, true, true, cx)
|
||||
});
|
||||
executor.run_until_parked();
|
||||
workspace_a.update(cx_a, |workspace, cx| {
|
||||
|
||||
@@ -18,7 +18,9 @@ use gpui::{
|
||||
TestAppContext, UpdateGlobal,
|
||||
};
|
||||
use language::{
|
||||
language_settings::{AllLanguageSettings, Formatter, PrettierSettings},
|
||||
language_settings::{
|
||||
AllLanguageSettings, Formatter, FormatterList, PrettierSettings, SelectedFormatter,
|
||||
},
|
||||
tree_sitter_rust, Diagnostic, DiagnosticEntry, FakeLspAdapter, Language, LanguageConfig,
|
||||
LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
|
||||
};
|
||||
@@ -1375,7 +1377,7 @@ async fn test_unshare_project(
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
|
||||
let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
executor.run_until_parked();
|
||||
|
||||
@@ -1503,7 +1505,8 @@ async fn test_project_reconnect(
|
||||
let (project_a1, _) = client_a.build_local_project("/root-1/dir1", cx_a).await;
|
||||
let (project_a2, _) = client_a.build_local_project("/root-2", cx_a).await;
|
||||
let (project_a3, _) = client_a.build_local_project("/root-3", cx_a).await;
|
||||
let worktree_a1 = project_a1.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
|
||||
let worktree_a1 =
|
||||
project_a1.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
|
||||
let project1_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a1.clone(), cx))
|
||||
.await
|
||||
@@ -2306,7 +2309,7 @@ async fn test_propagate_saves_and_fs_changes(
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
|
||||
let worktree_a = project_a.read_with(cx_a, |p, _| p.worktrees().next().unwrap());
|
||||
let worktree_a = project_a.read_with(cx_a, |p, cx| p.worktrees(cx).next().unwrap());
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -2316,9 +2319,9 @@ async fn test_propagate_saves_and_fs_changes(
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
|
||||
|
||||
let worktree_b = project_b.read_with(cx_b, |p, _| p.worktrees().next().unwrap());
|
||||
let worktree_b = project_b.read_with(cx_b, |p, cx| p.worktrees(cx).next().unwrap());
|
||||
|
||||
let worktree_c = project_c.read_with(cx_c, |p, _| p.worktrees().next().unwrap());
|
||||
let worktree_c = project_c.read_with(cx_c, |p, cx| p.worktrees(cx).next().unwrap());
|
||||
|
||||
// Open and edit a buffer as both guests B and C.
|
||||
let buffer_b = project_b
|
||||
@@ -3020,8 +3023,8 @@ async fn test_fs_operations(
|
||||
.unwrap();
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
|
||||
let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
|
||||
let worktree_b = project_b.read_with(cx_b, |project, _| project.worktrees().next().unwrap());
|
||||
let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
|
||||
let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap());
|
||||
|
||||
let entry = project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
@@ -3321,7 +3324,7 @@ async fn test_local_settings(
|
||||
// As client B, join that project and observe the local settings.
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
|
||||
let worktree_b = project_b.read_with(cx_b, |project, _| project.worktrees().next().unwrap());
|
||||
let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap());
|
||||
executor.run_until_parked();
|
||||
cx_b.read(|cx| {
|
||||
let store = cx.global::<SettingsStore>();
|
||||
@@ -3733,7 +3736,7 @@ async fn test_leaving_project(
|
||||
// Client B opens a buffer.
|
||||
let buffer_b1 = project_b1
|
||||
.update(cx_b, |project, cx| {
|
||||
let worktree_id = project.worktrees().next().unwrap().read(cx).id();
|
||||
let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
|
||||
project.open_buffer((worktree_id, "a.txt"), cx)
|
||||
})
|
||||
.await
|
||||
@@ -3771,7 +3774,7 @@ async fn test_leaving_project(
|
||||
|
||||
let buffer_b2 = project_b2
|
||||
.update(cx_b, |project, cx| {
|
||||
let worktree_id = project.worktrees().next().unwrap().read(cx).id();
|
||||
let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
|
||||
project.open_buffer((worktree_id, "a.txt"), cx)
|
||||
})
|
||||
.await
|
||||
@@ -4409,10 +4412,13 @@ async fn test_formatting_buffer(
|
||||
cx_a.update(|cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
|
||||
file.defaults.formatter = Some(Formatter::External {
|
||||
command: "awk".into(),
|
||||
arguments: vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(),
|
||||
});
|
||||
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
|
||||
vec![Formatter::External {
|
||||
command: "awk".into(),
|
||||
arguments: vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(),
|
||||
}]
|
||||
.into(),
|
||||
)));
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -4493,7 +4499,7 @@ async fn test_prettier_formatting_buffer(
|
||||
cx_a.update(|cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
|
||||
file.defaults.formatter = Some(Formatter::Auto);
|
||||
file.defaults.formatter = Some(SelectedFormatter::Auto);
|
||||
file.defaults.prettier = Some(PrettierSettings {
|
||||
allowed: true,
|
||||
..PrettierSettings::default()
|
||||
@@ -4504,7 +4510,9 @@ async fn test_prettier_formatting_buffer(
|
||||
cx_b.update(|cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
|
||||
file.defaults.formatter = Some(Formatter::LanguageServer);
|
||||
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
|
||||
vec![Formatter::LanguageServer { name: None }].into(),
|
||||
)));
|
||||
file.defaults.prettier = Some(PrettierSettings {
|
||||
allowed: true,
|
||||
..PrettierSettings::default()
|
||||
@@ -4620,7 +4628,7 @@ async fn test_definition(
|
||||
.unwrap();
|
||||
cx_b.read(|cx| {
|
||||
assert_eq!(definitions_1.len(), 1);
|
||||
assert_eq!(project_b.read(cx).worktrees().count(), 2);
|
||||
assert_eq!(project_b.read(cx).worktrees(cx).count(), 2);
|
||||
let target_buffer = definitions_1[0].target.buffer.read(cx);
|
||||
assert_eq!(
|
||||
target_buffer.text(),
|
||||
@@ -4649,7 +4657,7 @@ async fn test_definition(
|
||||
.unwrap();
|
||||
cx_b.read(|cx| {
|
||||
assert_eq!(definitions_2.len(), 1);
|
||||
assert_eq!(project_b.read(cx).worktrees().count(), 2);
|
||||
assert_eq!(project_b.read(cx).worktrees(cx).count(), 2);
|
||||
let target_buffer = definitions_2[0].target.buffer.read(cx);
|
||||
assert_eq!(
|
||||
target_buffer.text(),
|
||||
@@ -4807,7 +4815,7 @@ async fn test_references(
|
||||
assert!(status.pending_work.is_empty());
|
||||
|
||||
assert_eq!(references.len(), 3);
|
||||
assert_eq!(project.worktrees().count(), 2);
|
||||
assert_eq!(project.worktrees(cx).count(), 2);
|
||||
|
||||
let two_buffer = references[0].buffer.read(cx);
|
||||
let three_buffer = references[2].buffer.read(cx);
|
||||
@@ -6192,7 +6200,7 @@ async fn test_preview_tabs(cx: &mut TestAppContext) {
|
||||
let project = workspace.update(cx, |workspace, _| workspace.project().clone());
|
||||
|
||||
let worktree_id = project.update(cx, |project, cx| {
|
||||
project.worktrees().next().unwrap().read(cx).id()
|
||||
project.worktrees(cx).next().unwrap().read(cx).id()
|
||||
});
|
||||
|
||||
let path_1 = ProjectPath {
|
||||
|
||||
@@ -301,7 +301,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
let is_local = project.read_with(cx, |project, _| project.is_local());
|
||||
let worktree = project.read_with(cx, |project, cx| {
|
||||
project
|
||||
.worktrees()
|
||||
.worktrees(cx)
|
||||
.filter(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
worktree.is_visible()
|
||||
@@ -423,7 +423,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
81.. => {
|
||||
let worktree = project.read_with(cx, |project, cx| {
|
||||
project
|
||||
.worktrees()
|
||||
.worktrees(cx)
|
||||
.filter(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
worktree.is_visible()
|
||||
@@ -1172,7 +1172,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
let host_worktree_snapshots =
|
||||
host_project.read_with(host_cx, |host_project, cx| {
|
||||
host_project
|
||||
.worktrees()
|
||||
.worktrees(cx)
|
||||
.map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
(worktree.id(), worktree.snapshot())
|
||||
@@ -1180,7 +1180,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
.collect::<BTreeMap<_, _>>()
|
||||
});
|
||||
let guest_worktree_snapshots = guest_project
|
||||
.worktrees()
|
||||
.worktrees(cx)
|
||||
.map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
(worktree.id(), worktree.snapshot())
|
||||
@@ -1538,7 +1538,7 @@ fn project_path_for_full_path(
|
||||
let root_name = components.next().unwrap().as_os_str().to_str().unwrap();
|
||||
let path = components.as_path().into();
|
||||
let worktree_id = project.read_with(cx, |project, cx| {
|
||||
project.worktrees().find_map(|worktree| {
|
||||
project.worktrees(cx).find_map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
if worktree.root_name() == root_name {
|
||||
Some(worktree.id())
|
||||
|
||||
102
crates/collab/src/tests/remote_editing_collaboration_tests.rs
Normal file
@@ -0,0 +1,102 @@
|
||||
use crate::tests::TestServer;
|
||||
use call::ActiveCall;
|
||||
use fs::{FakeFs, Fs as _};
|
||||
use gpui::{Context as _, TestAppContext};
|
||||
use remote::SshSession;
|
||||
use remote_server::HeadlessProject;
|
||||
use serde_json::json;
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_sharing_an_ssh_remote_project(
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
server_cx: &mut TestAppContext,
|
||||
) {
|
||||
let executor = cx_a.executor();
|
||||
let mut server = TestServer::start(executor.clone()).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
server
|
||||
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
|
||||
.await;
|
||||
|
||||
// Set up project on remote FS
|
||||
let (client_ssh, server_ssh) = SshSession::fake(cx_a, server_cx);
|
||||
let remote_fs = FakeFs::new(server_cx.executor());
|
||||
remote_fs
|
||||
.insert_tree(
|
||||
"/code",
|
||||
json!({
|
||||
"project1": {
|
||||
"README.md": "# project 1",
|
||||
"src": {
|
||||
"lib.rs": "fn one() -> usize { 1 }"
|
||||
}
|
||||
},
|
||||
"project2": {
|
||||
"README.md": "# project 2",
|
||||
},
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
// User A connects to the remote project via SSH.
|
||||
server_cx.update(HeadlessProject::init);
|
||||
let _headless_project =
|
||||
server_cx.new_model(|cx| HeadlessProject::new(server_ssh, remote_fs.clone(), cx));
|
||||
|
||||
let (project_a, worktree_id) = client_a
|
||||
.build_ssh_project("/code/project1", client_ssh, cx_a)
|
||||
.await;
|
||||
|
||||
// User A shares the remote project.
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// User B joins the project.
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let worktree_b = project_b
|
||||
.update(cx_b, |project, cx| project.worktree_for_id(worktree_id, cx))
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
worktree_b.update(cx_b, |worktree, _cx| {
|
||||
assert_eq!(
|
||||
worktree.paths().map(Arc::as_ref).collect::<Vec<_>>(),
|
||||
vec![
|
||||
Path::new("README.md"),
|
||||
Path::new("src"),
|
||||
Path::new("src/lib.rs"),
|
||||
]
|
||||
);
|
||||
});
|
||||
|
||||
// User B can open buffers in the remote project.
|
||||
let buffer_b = project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
project.open_buffer((worktree_id, "src/lib.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
buffer_b.update(cx_b, |buffer, cx| {
|
||||
assert_eq!(buffer.text(), "fn one() -> usize { 1 }");
|
||||
let ix = buffer.text().find('1').unwrap();
|
||||
buffer.edit([(ix..ix + 1, "100")], None, cx);
|
||||
});
|
||||
|
||||
project_b
|
||||
.update(cx_b, |project, cx| project.save_buffer(buffer_b, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
remote_fs
|
||||
.load("/code/project1/src/lib.rs".as_ref())
|
||||
.await
|
||||
.unwrap(),
|
||||
"fn one() -> usize { 100 }"
|
||||
);
|
||||
}
|
||||
@@ -19,18 +19,20 @@ use fs::FakeFs;
|
||||
use futures::{channel::oneshot, StreamExt as _};
|
||||
use git::GitHostingProviderRegistry;
|
||||
use gpui::{BackgroundExecutor, Context, Model, Task, TestAppContext, View, VisualTestContext};
|
||||
use http::FakeHttpClient;
|
||||
use http_client::FakeHttpClient;
|
||||
use language::LanguageRegistry;
|
||||
use node_runtime::FakeNodeRuntime;
|
||||
use notifications::NotificationStore;
|
||||
use parking_lot::Mutex;
|
||||
use project::{Project, WorktreeId};
|
||||
use remote::SshSession;
|
||||
use rpc::{
|
||||
proto::{self, ChannelRole},
|
||||
RECEIVE_TIMEOUT,
|
||||
};
|
||||
use semantic_version::SemanticVersion;
|
||||
use serde_json::json;
|
||||
use session::Session;
|
||||
use settings::SettingsStore;
|
||||
use std::{
|
||||
cell::{Ref, RefCell, RefMut},
|
||||
@@ -155,6 +157,8 @@ impl TestServer {
|
||||
}
|
||||
|
||||
pub async fn create_client(&mut self, cx: &mut TestAppContext, name: &str) -> TestClient {
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
|
||||
cx.update(|cx| {
|
||||
if cx.has_global::<SettingsStore>() {
|
||||
panic!("Same cx used to create two test clients")
|
||||
@@ -263,7 +267,6 @@ impl TestServer {
|
||||
git_hosting_provider_registry
|
||||
.register_hosting_provider(Arc::new(git_hosting_providers::Github));
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
|
||||
let workspace_store = cx.new_model(|cx| WorkspaceStore::new(client.clone(), cx));
|
||||
let language_registry = Arc::new(LanguageRegistry::test(cx.executor()));
|
||||
@@ -275,6 +278,7 @@ impl TestServer {
|
||||
fs: fs.clone(),
|
||||
build_window_options: |_, _| Default::default(),
|
||||
node_runtime: FakeNodeRuntime::new(),
|
||||
session: Session::test(),
|
||||
});
|
||||
|
||||
let os_keymap = "keymaps/default-macos.json";
|
||||
@@ -294,7 +298,8 @@ impl TestServer {
|
||||
menu::init();
|
||||
dev_server_projects::init(client.clone(), cx);
|
||||
settings::KeymapFile::load_asset(os_keymap, cx).unwrap();
|
||||
assistant::FakeCompletionProvider::setup_test(cx);
|
||||
language_model::LanguageModelRegistry::test(cx);
|
||||
completion::init(cx);
|
||||
assistant::context_store::init(&client);
|
||||
});
|
||||
|
||||
@@ -402,6 +407,7 @@ impl TestServer {
|
||||
fs: fs.clone(),
|
||||
build_window_options: |_, _| Default::default(),
|
||||
node_runtime: FakeNodeRuntime::new(),
|
||||
session: Session::test(),
|
||||
});
|
||||
|
||||
cx.update(|cx| {
|
||||
@@ -814,6 +820,30 @@ impl TestClient {
|
||||
(project, worktree.read_with(cx, |tree, _| tree.id()))
|
||||
}
|
||||
|
||||
pub async fn build_ssh_project(
|
||||
&self,
|
||||
root_path: impl AsRef<Path>,
|
||||
ssh: Arc<SshSession>,
|
||||
cx: &mut TestAppContext,
|
||||
) -> (Model<Project>, WorktreeId) {
|
||||
let project = cx.update(|cx| {
|
||||
Project::ssh(
|
||||
ssh,
|
||||
self.client().clone(),
|
||||
self.app_state.node_runtime.clone(),
|
||||
self.app_state.user_store.clone(),
|
||||
self.app_state.languages.clone(),
|
||||
self.app_state.fs.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let (worktree, _) = project
|
||||
.update(cx, |p, cx| p.find_or_create_worktree(root_path, true, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
(project, worktree.read_with(cx, |tree, _| tree.id()))
|
||||
}
|
||||
|
||||
pub async fn build_test_project(&self, cx: &mut TestAppContext) -> Model<Project> {
|
||||
self.fs()
|
||||
.insert_tree(
|
||||
|
||||
@@ -25,13 +25,14 @@ test-support = [
|
||||
"settings/test-support",
|
||||
"util/test-support",
|
||||
"workspace/test-support",
|
||||
"http/test-support",
|
||||
"http_client/test-support",
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
call.workspace = true
|
||||
channel.workspace = true
|
||||
chrono.workspace = true
|
||||
client.workspace = true
|
||||
collections.workspace = true
|
||||
db.workspace = true
|
||||
@@ -77,7 +78,7 @@ pretty_assertions.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
rpc = { workspace = true, features = ["test-support"] }
|
||||
settings = { workspace = true, features = ["test-support"] }
|
||||
tree-sitter-markdown.workspace = true
|
||||
tree-sitter-md.workspace = true
|
||||
util = { workspace = true, features = ["test-support"] }
|
||||
http = { workspace = true, features = ["test-support"] }
|
||||
http_client = { workspace = true, features = ["test-support"] }
|
||||
workspace = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -11,20 +11,22 @@ use editor::{
|
||||
EditorEvent,
|
||||
};
|
||||
use gpui::{
|
||||
actions, AnyElement, AnyView, AppContext, ClipboardItem, Entity as _, EventEmitter,
|
||||
FocusableView, IntoElement as _, Model, Pixels, Point, Render, Subscription, Task, View,
|
||||
ViewContext, VisualContext as _, WeakView, WindowContext,
|
||||
actions, AnyView, AppContext, ClipboardItem, Entity as _, EventEmitter, FocusableView, Model,
|
||||
Pixels, Point, Render, Subscription, Task, View, ViewContext, VisualContext as _, WeakView,
|
||||
WindowContext,
|
||||
};
|
||||
use project::Project;
|
||||
use rpc::proto::ChannelVisibility;
|
||||
use std::{
|
||||
any::{Any, TypeId},
|
||||
sync::Arc,
|
||||
};
|
||||
use ui::{prelude::*, Label};
|
||||
use ui::prelude::*;
|
||||
use util::ResultExt;
|
||||
use workspace::item::TabContentParams;
|
||||
use workspace::{item::Dedup, notifications::NotificationId};
|
||||
use workspace::{
|
||||
item::{FollowableItem, Item, ItemEvent, ItemHandle, TabContentParams},
|
||||
item::{FollowableItem, Item, ItemEvent, ItemHandle},
|
||||
searchable::SearchableItemHandle,
|
||||
ItemNavHistory, Pane, SaveIntent, Toast, ViewId, Workspace, WorkspaceId,
|
||||
};
|
||||
@@ -385,24 +387,45 @@ impl Item for ChannelView {
|
||||
}
|
||||
}
|
||||
|
||||
fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement {
|
||||
let label = if let Some(channel) = self.channel(cx) {
|
||||
match (
|
||||
fn tab_icon(&self, cx: &WindowContext) -> Option<Icon> {
|
||||
let channel = self.channel(cx)?;
|
||||
let icon = match channel.visibility {
|
||||
ChannelVisibility::Public => IconName::Public,
|
||||
ChannelVisibility::Members => IconName::Hash,
|
||||
};
|
||||
|
||||
Some(Icon::new(icon))
|
||||
}
|
||||
|
||||
fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> gpui::AnyElement {
|
||||
let (channel_name, status) = if let Some(channel) = self.channel(cx) {
|
||||
let status = match (
|
||||
self.channel_buffer.read(cx).buffer().read(cx).read_only(),
|
||||
self.channel_buffer.read(cx).is_connected(),
|
||||
) {
|
||||
(false, true) => format!("#{}", channel.name),
|
||||
(true, true) => format!("#{} (read-only)", channel.name),
|
||||
(_, false) => format!("#{} (disconnected)", channel.name),
|
||||
}
|
||||
(false, true) => None,
|
||||
(true, true) => Some("read-only"),
|
||||
(_, false) => Some("disconnected"),
|
||||
};
|
||||
|
||||
(channel.name.clone(), status)
|
||||
} else {
|
||||
"channel notes (disconnected)".to_string()
|
||||
("<unknown>".into(), Some("disconnected"))
|
||||
};
|
||||
Label::new(label)
|
||||
.color(if params.selected {
|
||||
Color::Default
|
||||
} else {
|
||||
Color::Muted
|
||||
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
Label::new(channel_name)
|
||||
.color(params.text_color())
|
||||
.italic(params.preview),
|
||||
)
|
||||
.when_some(status, |element, status| {
|
||||
element.child(
|
||||
Label::new(status)
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
})
|
||||
.into_any_element()
|
||||
}
|
||||
|
||||
@@ -111,6 +111,7 @@ impl ChatPanel {
|
||||
this.is_scrolled_to_bottom = !event.is_scrolled;
|
||||
}));
|
||||
|
||||
let local_offset = chrono::Local::now().offset().local_minus_utc();
|
||||
let mut this = Self {
|
||||
fs,
|
||||
client,
|
||||
@@ -120,7 +121,7 @@ impl ChatPanel {
|
||||
active_chat: Default::default(),
|
||||
pending_serialization: Task::ready(None),
|
||||
message_editor: input_editor,
|
||||
local_timezone: cx.local_timezone(),
|
||||
local_timezone: UtcOffset::from_whole_seconds(local_offset).unwrap(),
|
||||
subscriptions: Vec::new(),
|
||||
is_scrolled_to_bottom: true,
|
||||
active: false,
|
||||
@@ -1106,9 +1107,11 @@ impl Panel for ChatPanel {
|
||||
}
|
||||
|
||||
fn set_position(&mut self, position: DockPosition, cx: &mut ViewContext<Self>) {
|
||||
settings::update_settings_file::<ChatPanelSettings>(self.fs.clone(), cx, move |settings| {
|
||||
settings.dock = Some(position)
|
||||
});
|
||||
settings::update_settings_file::<ChatPanelSettings>(
|
||||
self.fs.clone(),
|
||||
cx,
|
||||
move |settings, _| settings.dock = Some(position),
|
||||
);
|
||||
}
|
||||
|
||||
fn size(&self, cx: &gpui::WindowContext) -> Pixels {
|
||||
|
||||
@@ -6,7 +6,7 @@ use editor::{AnchorRangeExt, CompletionProvider, Editor, EditorElement, EditorSt
|
||||
use fuzzy::{StringMatch, StringMatchCandidate};
|
||||
use gpui::{
|
||||
AsyncWindowContext, FocusableView, FontStyle, FontWeight, HighlightStyle, IntoElement, Model,
|
||||
Render, Task, TextStyle, View, ViewContext, WeakView, WhiteSpace,
|
||||
Render, Task, TextStyle, View, ViewContext, WeakView,
|
||||
};
|
||||
use language::{
|
||||
language_settings::SoftWrap, Anchor, Buffer, BufferSnapshot, CodeLabel, LanguageRegistry,
|
||||
@@ -533,14 +533,12 @@ impl Render for MessageEditor {
|
||||
},
|
||||
font_family: settings.ui_font.family.clone(),
|
||||
font_features: settings.ui_font.features.clone(),
|
||||
font_fallbacks: settings.ui_font.fallbacks.clone(),
|
||||
font_size: TextSize::Small.rems(cx).into(),
|
||||
font_weight: settings.ui_font.weight,
|
||||
font_style: FontStyle::Normal,
|
||||
line_height: relative(1.3),
|
||||
background_color: None,
|
||||
underline: None,
|
||||
strikethrough: None,
|
||||
white_space: WhiteSpace::Normal,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
div()
|
||||
|
||||
@@ -16,7 +16,7 @@ use gpui::{
|
||||
EventEmitter, FocusHandle, FocusableView, FontStyle, InteractiveElement, IntoElement,
|
||||
ListOffset, ListState, Model, MouseDownEvent, ParentElement, Pixels, Point, PromptLevel,
|
||||
Render, SharedString, Styled, Subscription, Task, TextStyle, View, ViewContext, VisualContext,
|
||||
WeakView, WhiteSpace,
|
||||
WeakView,
|
||||
};
|
||||
use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrev};
|
||||
use project::{Fs, Project};
|
||||
@@ -2190,14 +2190,12 @@ impl CollabPanel {
|
||||
},
|
||||
font_family: settings.ui_font.family.clone(),
|
||||
font_features: settings.ui_font.features.clone(),
|
||||
font_fallbacks: settings.ui_font.fallbacks.clone(),
|
||||
font_size: rems(0.875).into(),
|
||||
font_weight: settings.ui_font.weight,
|
||||
font_style: FontStyle::Normal,
|
||||
line_height: relative(1.3),
|
||||
background_color: None,
|
||||
underline: None,
|
||||
strikethrough: None,
|
||||
white_space: WhiteSpace::Normal,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
EditorElement::new(
|
||||
@@ -2809,7 +2807,7 @@ impl Panel for CollabPanel {
|
||||
settings::update_settings_file::<CollaborationPanelSettings>(
|
||||
self.fs.clone(),
|
||||
cx,
|
||||
move |settings| settings.dock = Some(position),
|
||||
move |settings, _| settings.dock = Some(position),
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -5,7 +5,6 @@ use gpui::{
|
||||
};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use std::sync::Arc;
|
||||
use theme::ActiveTheme as _;
|
||||
use ui::{prelude::*, Avatar, ListItem, ListItemSpacing};
|
||||
use util::{ResultExt as _, TryFutureExt};
|
||||
use workspace::ModalView;
|
||||
|
||||
@@ -127,11 +127,12 @@ impl NotificationPanel {
|
||||
},
|
||||
));
|
||||
|
||||
let local_offset = chrono::Local::now().offset().local_minus_utc();
|
||||
let mut this = Self {
|
||||
fs,
|
||||
client,
|
||||
user_store,
|
||||
local_timezone: cx.local_timezone(),
|
||||
local_timezone: UtcOffset::from_whole_seconds(local_offset).unwrap(),
|
||||
channel_store: ChannelStore::global(cx),
|
||||
notification_store: NotificationStore::global(cx),
|
||||
notification_list,
|
||||
@@ -671,7 +672,7 @@ impl Panel for NotificationPanel {
|
||||
settings::update_settings_file::<NotificationPanelSettings>(
|
||||
self.fs.clone(),
|
||||
cx,
|
||||
move |settings| settings.dock = Some(position),
|
||||
move |settings, _| settings.dock = Some(position),
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -17,9 +17,10 @@ use gpui::{
|
||||
use picker::{Picker, PickerDelegate};
|
||||
|
||||
use postage::{sink::Sink, stream::Stream};
|
||||
use settings::Settings;
|
||||
use ui::{h_flex, prelude::*, v_flex, HighlightedLabel, KeyBinding, ListItem, ListItemSpacing};
|
||||
use util::ResultExt;
|
||||
use workspace::{ModalView, Workspace};
|
||||
use workspace::{ModalView, Workspace, WorkspaceSettings};
|
||||
use zed_actions::OpenZedUrl;
|
||||
|
||||
actions!(command_palette, [Toggle]);
|
||||
@@ -248,9 +249,13 @@ impl PickerDelegate for CommandPaletteDelegate {
|
||||
|
||||
fn update_matches(
|
||||
&mut self,
|
||||
query: String,
|
||||
mut query: String,
|
||||
cx: &mut ViewContext<Picker<Self>>,
|
||||
) -> gpui::Task<()> {
|
||||
let settings = WorkspaceSettings::get_global(cx);
|
||||
if let Some(alias) = settings.command_aliases.get(&query) {
|
||||
query = alias.to_string();
|
||||
}
|
||||
let (mut tx, mut rx) = postage::dispatch::channel(1);
|
||||
let task = cx.background_executor().spawn({
|
||||
let mut commands = self.all_commands.clone();
|
||||
@@ -477,7 +482,7 @@ mod tests {
|
||||
});
|
||||
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, cx);
|
||||
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, cx);
|
||||
editor.update(cx, |editor, cx| editor.focus(cx))
|
||||
});
|
||||
|
||||
|
||||
43
crates/completion/Cargo.toml
Normal file
@@ -0,0 +1,43 @@
|
||||
[package]
|
||||
name = "completion"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/completion.rs"
|
||||
doctest = false
|
||||
|
||||
[features]
|
||||
test-support = [
|
||||
"editor/test-support",
|
||||
"language/test-support",
|
||||
"language_model/test-support",
|
||||
"project/test-support",
|
||||
"text/test-support",
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
language_model.workspace = true
|
||||
serde.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
ui.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
ctor.workspace = true
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
env_logger.workspace = true
|
||||
language = { workspace = true, features = ["test-support"] }
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
language_model = { workspace = true, features = ["test-support"] }
|
||||
rand.workspace = true
|
||||
text = { workspace = true, features = ["test-support"] }
|
||||
unindent.workspace = true
|
||||
286
crates/completion/src/completion.rs
Normal file
@@ -0,0 +1,286 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use futures::{future::BoxFuture, stream::BoxStream, StreamExt};
|
||||
use gpui::{AppContext, Global, Model, ModelContext, Task};
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry,
|
||||
LanguageModelRequest,
|
||||
};
|
||||
use smol::lock::{Semaphore, SemaphoreGuardArc};
|
||||
use std::{pin::Pin, sync::Arc, task::Poll};
|
||||
use ui::Context;
|
||||
|
||||
pub fn init(cx: &mut AppContext) {
|
||||
let completion_provider = cx.new_model(|cx| LanguageModelCompletionProvider::new(cx));
|
||||
cx.set_global(GlobalLanguageModelCompletionProvider(completion_provider));
|
||||
}
|
||||
|
||||
struct GlobalLanguageModelCompletionProvider(Model<LanguageModelCompletionProvider>);
|
||||
|
||||
impl Global for GlobalLanguageModelCompletionProvider {}
|
||||
|
||||
pub struct LanguageModelCompletionProvider {
|
||||
active_provider: Option<Arc<dyn LanguageModelProvider>>,
|
||||
active_model: Option<Arc<dyn LanguageModel>>,
|
||||
request_limiter: Arc<Semaphore>,
|
||||
}
|
||||
|
||||
const MAX_CONCURRENT_COMPLETION_REQUESTS: usize = 4;
|
||||
|
||||
pub struct LanguageModelCompletionResponse {
|
||||
inner: BoxStream<'static, Result<String>>,
|
||||
_lock: SemaphoreGuardArc,
|
||||
}
|
||||
|
||||
impl futures::Stream for LanguageModelCompletionResponse {
|
||||
type Item = Result<String>;
|
||||
|
||||
fn poll_next(
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut std::task::Context<'_>,
|
||||
) -> Poll<Option<Self::Item>> {
|
||||
Pin::new(&mut self.inner).poll_next(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageModelCompletionProvider {
|
||||
pub fn global(cx: &AppContext) -> Model<Self> {
|
||||
cx.global::<GlobalLanguageModelCompletionProvider>()
|
||||
.0
|
||||
.clone()
|
||||
}
|
||||
|
||||
pub fn read_global(cx: &AppContext) -> &Self {
|
||||
cx.global::<GlobalLanguageModelCompletionProvider>()
|
||||
.0
|
||||
.read(cx)
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn test(cx: &mut AppContext) {
|
||||
let provider = cx.new_model(|cx| {
|
||||
let mut this = Self::new(cx);
|
||||
let available_model = LanguageModelRegistry::read_global(cx)
|
||||
.available_models(cx)
|
||||
.first()
|
||||
.unwrap()
|
||||
.clone();
|
||||
this.set_active_model(available_model, cx);
|
||||
this
|
||||
});
|
||||
cx.set_global(GlobalLanguageModelCompletionProvider(provider));
|
||||
}
|
||||
|
||||
pub fn new(cx: &mut ModelContext<Self>) -> Self {
|
||||
cx.observe(&LanguageModelRegistry::global(cx), |_, _, cx| {
|
||||
cx.notify();
|
||||
})
|
||||
.detach();
|
||||
|
||||
Self {
|
||||
active_provider: None,
|
||||
active_model: None,
|
||||
request_limiter: Arc::new(Semaphore::new(MAX_CONCURRENT_COMPLETION_REQUESTS)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn active_provider(&self) -> Option<Arc<dyn LanguageModelProvider>> {
|
||||
self.active_provider.clone()
|
||||
}
|
||||
|
||||
pub fn set_active_provider(
|
||||
&mut self,
|
||||
provider_id: LanguageModelProviderId,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
self.active_provider = LanguageModelRegistry::read_global(cx).provider(&provider_id);
|
||||
self.active_model = None;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn active_model(&self) -> Option<Arc<dyn LanguageModel>> {
|
||||
self.active_model.clone()
|
||||
}
|
||||
|
||||
pub fn set_active_model(&mut self, model: Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
|
||||
if self.active_model.as_ref().map_or(false, |m| {
|
||||
m.id() == model.id() && m.provider_id() == model.provider_id()
|
||||
}) {
|
||||
return;
|
||||
}
|
||||
|
||||
self.active_provider =
|
||||
LanguageModelRegistry::read_global(cx).provider(&model.provider_id());
|
||||
self.active_model = Some(model.clone());
|
||||
|
||||
if let Some(provider) = self.active_provider.as_ref() {
|
||||
provider.load_model(model, cx);
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn is_authenticated(&self, cx: &AppContext) -> bool {
|
||||
self.active_provider
|
||||
.as_ref()
|
||||
.map_or(false, |provider| provider.is_authenticated(cx))
|
||||
}
|
||||
|
||||
pub fn authenticate(&self, cx: &AppContext) -> Task<Result<()>> {
|
||||
self.active_provider
|
||||
.as_ref()
|
||||
.map_or(Task::ready(Ok(())), |provider| provider.authenticate(cx))
|
||||
}
|
||||
|
||||
pub fn reset_credentials(&self, cx: &AppContext) -> Task<Result<()>> {
|
||||
self.active_provider
|
||||
.as_ref()
|
||||
.map_or(Task::ready(Ok(())), |provider| {
|
||||
provider.reset_credentials(cx)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn count_tokens(
|
||||
&self,
|
||||
request: LanguageModelRequest,
|
||||
cx: &AppContext,
|
||||
) -> Option<BoxFuture<'static, Result<usize>>> {
|
||||
if let Some(model) = self.active_model() {
|
||||
Some(model.count_tokens(request, cx))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stream_completion(
|
||||
&self,
|
||||
request: LanguageModelRequest,
|
||||
cx: &AppContext,
|
||||
) -> Task<Result<LanguageModelCompletionResponse>> {
|
||||
if let Some(language_model) = self.active_model() {
|
||||
let rate_limiter = self.request_limiter.clone();
|
||||
cx.spawn(|cx| async move {
|
||||
let lock = rate_limiter.acquire_arc().await;
|
||||
let response = language_model.stream_completion(request, &cx).await?;
|
||||
Ok(LanguageModelCompletionResponse {
|
||||
inner: response,
|
||||
_lock: lock,
|
||||
})
|
||||
})
|
||||
} else {
|
||||
Task::ready(Err(anyhow!("No active model set")))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn complete(&self, request: LanguageModelRequest, cx: &AppContext) -> Task<Result<String>> {
|
||||
let response = self.stream_completion(request, cx);
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let mut chunks = response.await?;
|
||||
let mut completion = String::new();
|
||||
while let Some(chunk) = chunks.next().await {
|
||||
let chunk = chunk?;
|
||||
completion.push_str(&chunk);
|
||||
}
|
||||
Ok(completion)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use futures::StreamExt;
|
||||
use gpui::AppContext;
|
||||
use settings::SettingsStore;
|
||||
use ui::Context;
|
||||
|
||||
use crate::{
|
||||
LanguageModelCompletionProvider, LanguageModelRequest, MAX_CONCURRENT_COMPLETION_REQUESTS,
|
||||
};
|
||||
|
||||
use language_model::LanguageModelRegistry;
|
||||
|
||||
#[gpui::test]
|
||||
fn test_rate_limiting(cx: &mut AppContext) {
|
||||
SettingsStore::test(cx);
|
||||
let fake_provider = LanguageModelRegistry::test(cx);
|
||||
|
||||
let model = LanguageModelRegistry::read_global(cx)
|
||||
.available_models(cx)
|
||||
.first()
|
||||
.cloned()
|
||||
.unwrap();
|
||||
|
||||
let provider = cx.new_model(|cx| {
|
||||
let mut provider = LanguageModelCompletionProvider::new(cx);
|
||||
provider.set_active_model(model.clone(), cx);
|
||||
provider
|
||||
});
|
||||
|
||||
let fake_model = fake_provider.test_model();
|
||||
|
||||
// Enqueue some requests
|
||||
for i in 0..MAX_CONCURRENT_COMPLETION_REQUESTS * 2 {
|
||||
let response = provider.read(cx).stream_completion(
|
||||
LanguageModelRequest {
|
||||
temperature: i as f32 / 10.0,
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
);
|
||||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
let mut stream = response.await.unwrap();
|
||||
while let Some(message) = stream.next().await {
|
||||
message.unwrap();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
cx.background_executor().run_until_parked();
|
||||
assert_eq!(
|
||||
fake_model.completion_count(),
|
||||
MAX_CONCURRENT_COMPLETION_REQUESTS
|
||||
);
|
||||
|
||||
// Get the first completion request that is in flight and mark it as completed.
|
||||
let completion = fake_model.pending_completions().into_iter().next().unwrap();
|
||||
fake_model.finish_completion(&completion);
|
||||
|
||||
// Ensure that the number of in-flight completion requests is reduced.
|
||||
assert_eq!(
|
||||
fake_model.completion_count(),
|
||||
MAX_CONCURRENT_COMPLETION_REQUESTS - 1
|
||||
);
|
||||
|
||||
cx.background_executor().run_until_parked();
|
||||
|
||||
// Ensure that another completion request was allowed to acquire the lock.
|
||||
assert_eq!(
|
||||
fake_model.completion_count(),
|
||||
MAX_CONCURRENT_COMPLETION_REQUESTS
|
||||
);
|
||||
|
||||
// Mark all completion requests as finished that are in flight.
|
||||
for request in fake_model.pending_completions() {
|
||||
fake_model.finish_completion(&request);
|
||||
}
|
||||
|
||||
assert_eq!(fake_model.completion_count(), 0);
|
||||
|
||||
// Wait until the background tasks acquire the lock again.
|
||||
cx.background_executor().run_until_parked();
|
||||
|
||||
assert_eq!(
|
||||
fake_model.completion_count(),
|
||||
MAX_CONCURRENT_COMPLETION_REQUESTS - 1
|
||||
);
|
||||
|
||||
// Finish all remaining completion requests.
|
||||
for request in fake_model.pending_completions() {
|
||||
fake_model.finish_completion(&request);
|
||||
}
|
||||
|
||||
cx.background_executor().run_until_parked();
|
||||
|
||||
assert_eq!(fake_model.completion_count(), 0);
|
||||
}
|
||||
}
|
||||
@@ -32,7 +32,7 @@ command_palette_hooks.workspace = true
|
||||
editor.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
http.workspace = true
|
||||
http_client.workspace = true
|
||||
language.workspace = true
|
||||
lsp.workspace = true
|
||||
menu.workspace = true
|
||||
@@ -65,4 +65,4 @@ rpc = { workspace = true, features = ["test-support"] }
|
||||
settings = { workspace = true, features = ["test-support"] }
|
||||
theme = { workspace = true, features = ["test-support"] }
|
||||
util = { workspace = true, features = ["test-support"] }
|
||||
http = { workspace = true, features = ["test-support"] }
|
||||
http_client = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -12,8 +12,8 @@ use gpui::{
    actions, AppContext, AsyncAppContext, Context, Entity, EntityId, EventEmitter, Global, Model,
    ModelContext, Task, WeakModel,
};
-use http::github::latest_github_release;
-use http::HttpClient;
+use http_client::github::latest_github_release;
+use http_client::HttpClient;
use language::{
    language_settings::{all_language_settings, language_settings, InlineCompletionProvider},
    point_from_lsp, point_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, Language, PointUtf16,
@@ -393,7 +393,7 @@ impl Copilot {
            Default::default(),
            cx.to_async(),
        );
-        let http = http::FakeHttpClient::create(|_| async { unreachable!() });
+        let http = http_client::FakeHttpClient::create(|_| async { unreachable!() });
        let node_runtime = FakeNodeRuntime::new();
        let this = cx.new_model(|cx| Self {
            server_id: LanguageServerId(0),
@@ -691,7 +691,7 @@ impl Copilot {
            {
                match event {
                    language::Event::Edited => {
-                        let _ = registered_buffer.report_changes(&buffer, cx);
+                        drop(registered_buffer.report_changes(&buffer, cx));
                    }
                    language::Event::Saved => {
                        server
@@ -1236,7 +1236,7 @@ mod tests {
            unimplemented!()
        }

-        fn to_proto(&self) -> rpc::proto::File {
+        fn to_proto(&self, _: &AppContext) -> rpc::proto::File {
            unimplemented!()
        }

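Two mechanical changes run through the copilot crate above: imports move from the `http` crate to `http_client`, and intentionally discarded `#[must_use]` results are wrapped in `drop(...)` instead of being bound with `let _ =`. A self-contained sketch of the latter idiom follows; the function is hypothetical and only illustrates the pattern.

    #[must_use]
    fn report_changes() -> Vec<usize> {
        // Pretend this returns the indices of edited rows.
        Vec::new()
    }

    fn main() {
        // Before: binding to `_` silences the #[must_use] warning implicitly.
        let _ = report_changes();
        // After: `drop(...)` states explicitly that the result is discarded on purpose.
        drop(report_changes());
    }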
@@ -8,7 +8,7 @@ use language::{
    Buffer, OffsetRangeExt, ToOffset,
};
use settings::Settings;
-use std::{path::Path, sync::Arc, time::Duration};
+use std::{ops::Range, path::Path, sync::Arc, time::Duration};

pub const COPILOT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75);

@@ -239,7 +239,7 @@ impl InlineCompletionProvider for CopilotCompletionProvider {
        buffer: &Model<Buffer>,
        cursor_position: language::Anchor,
        cx: &'a AppContext,
-    ) -> Option<&'a str> {
+    ) -> Option<(&'a str, Option<Range<language::Anchor>>)> {
        let buffer_id = buffer.entity_id();
        let buffer = buffer.read(cx);
        let completion = self.active_completion()?;
@@ -269,7 +269,7 @@ impl InlineCompletionProvider for CopilotCompletionProvider {
            if completion_text.trim().is_empty() {
                None
            } else {
-                Some(completion_text)
+                Some((completion_text, None))
            }
        } else {
            None
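The hunks above widen the provider method from returning only the completion text to returning the text together with an optional replacement range. A hypothetical caller-side sketch is shown below; the function name and the plain `usize` range stand in for the real `language::Anchor` range and are assumptions, not part of this change.

    use std::ops::Range;

    fn apply_suggestion(suggestion: Option<(&str, Option<Range<usize>>)>) {
        match suggestion {
            // A range was supplied: the completion replaces that span of the buffer.
            Some((text, Some(range))) => println!("replace {range:?} with {text:?}"),
            // No range: the completion is inserted at the cursor, matching the old behavior.
            Some((text, None)) => println!("insert {text:?} at the cursor"),
            None => {}
        }
    }

The `Some((completion_text, None))` arm in the diff corresponds to the second case: Copilot completions keep their insert-at-cursor behavior.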
@@ -333,7 +333,7 @@ mod tests {
            three
        "});
        cx.simulate_keystroke(".");
-        let _ = handle_completion_request(
+        drop(handle_completion_request(
            &mut cx,
            indoc! {"
                one.|<>
@@ -341,7 +341,7 @@ mod tests {
                three
            "},
            vec!["completion_a", "completion_b"],
-        );
+        ));
        handle_copilot_completion_request(
            &copilot_lsp,
            vec![crate::request::Completion {
@@ -375,7 +375,7 @@ mod tests {
            three
        "});
        cx.simulate_keystroke(".");
-        let _ = handle_completion_request(
+        drop(handle_completion_request(
            &mut cx,
            indoc! {"
                one.|<>
@@ -383,7 +383,7 @@ mod tests {
                three
            "},
            vec![],
-        );
+        ));
        handle_copilot_completion_request(
            &copilot_lsp,
            vec![crate::request::Completion {
@@ -408,7 +408,7 @@ mod tests {
            three
        "});
        cx.simulate_keystroke(".");
-        let _ = handle_completion_request(
+        drop(handle_completion_request(
            &mut cx,
            indoc! {"
                one.|<>
@@ -416,7 +416,7 @@ mod tests {
                three
            "},
            vec!["completion_a", "completion_b"],
-        );
+        ));
        handle_copilot_completion_request(
            &copilot_lsp,
            vec![crate::request::Completion {
@@ -590,7 +590,7 @@ mod tests {
            three
        "});
        cx.simulate_keystroke(".");
-        let _ = handle_completion_request(
+        drop(handle_completion_request(
            &mut cx,
            indoc! {"
                one.|<>
@@ -598,7 +598,7 @@ mod tests {
                three
            "},
            vec![],
-        );
+        ));
        handle_copilot_completion_request(
            &copilot_lsp,
            vec![crate::request::Completion {
@@ -632,7 +632,7 @@ mod tests {
            three
        "});
        cx.simulate_keystroke(".");
-        let _ = handle_completion_request(
+        drop(handle_completion_request(
            &mut cx,
            indoc! {"
                one.|<>
@@ -640,7 +640,7 @@ mod tests {
                three
            "},
            vec![],
-        );
+        ));
        handle_copilot_completion_request(
            &copilot_lsp,
            vec![crate::request::Completion {
@@ -889,7 +889,7 @@ mod tests {
            three
        "});

-        let _ = handle_completion_request(
+        drop(handle_completion_request(
            &mut cx,
            indoc! {"
                one
@@ -897,7 +897,7 @@ mod tests {
                three
            "},
            vec!["completion_a", "completion_b"],
-        );
+        ));
        handle_copilot_completion_request(
            &copilot_lsp,
            vec![crate::request::Completion {
@@ -917,7 +917,7 @@ mod tests {
        });

        cx.simulate_keystroke("o");
-        let _ = handle_completion_request(
+        drop(handle_completion_request(
            &mut cx,
            indoc! {"
                one
@@ -925,7 +925,7 @@ mod tests {
                three
            "},
            vec!["completion_a_2", "completion_b_2"],
-        );
+        ));
        handle_copilot_completion_request(
            &copilot_lsp,
            vec![crate::request::Completion {
@@ -944,7 +944,7 @@ mod tests {
        });

        cx.simulate_keystroke(".");
-        let _ = handle_completion_request(
+        drop(handle_completion_request(
            &mut cx,
            indoc! {"
                one
@@ -952,7 +952,7 @@ mod tests {
                three
            "},
            vec!["something_else()"],
-        );
+        ));
        handle_copilot_completion_request(
            &copilot_lsp,
            vec![crate::request::Completion {

@@ -4,13 +4,13 @@ mod toolbar_controls;

#[cfg(test)]
mod diagnostics_tests;
-mod grouped_diagnostics;
+pub(crate) mod grouped_diagnostics;

use anyhow::Result;
use collections::{BTreeSet, HashSet};
use editor::{
    diagnostic_block_renderer,
-    display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock},
+    display_map::{BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock},
    highlight_diagnostic_message,
    scroll::Autoscroll,
    Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer, ToOffset,
@@ -45,7 +45,7 @@ use ui::{h_flex, prelude::*, Icon, IconName, Label};
use util::ResultExt;
use workspace::{
    item::{BreadcrumbText, Item, ItemEvent, ItemHandle, TabContentParams},
-    ItemNavHistory, Pane, ToolbarItemLocation, Workspace,
+    ItemNavHistory, ToolbarItemLocation, Workspace,
};

actions!(diagnostics, [Deploy, ToggleWarnings]);
@@ -85,7 +85,7 @@ struct DiagnosticGroupState {
    primary_diagnostic: DiagnosticEntry<language::Anchor>,
    primary_excerpt_ix: usize,
    excerpts: Vec<ExcerptId>,
-    blocks: HashSet<BlockId>,
+    blocks: HashSet<CustomBlockId>,
    block_count: usize,
}

@@ -237,13 +237,13 @@ impl ProjectDiagnosticsEditor {

    fn deploy(workspace: &mut Workspace, _: &Deploy, cx: &mut ViewContext<Workspace>) {
        if let Some(existing) = workspace.item_of_type::<ProjectDiagnosticsEditor>(cx) {
-            workspace.activate_item(&existing, cx);
+            workspace.activate_item(&existing, true, true, cx);
        } else {
            let workspace_handle = cx.view().downgrade();
            let diagnostics = cx.new_view(|cx| {
                ProjectDiagnosticsEditor::new(workspace.project().clone(), workspace_handle, cx)
            });
-            workspace.add_item_to_active_pane(Box::new(diagnostics), None, cx);
+            workspace.add_item_to_active_pane(Box::new(diagnostics), None, true, cx);
        }
    }

@@ -649,11 +649,7 @@ impl Item for ProjectDiagnosticsEditor {
    fn tab_content(&self, params: TabContentParams, _: &WindowContext) -> AnyElement {
        if self.summary.error_count == 0 && self.summary.warning_count == 0 {
            Label::new("No problems")
-                .color(if params.selected {
-                    Color::Default
-                } else {
-                    Color::Muted
-                })
+                .color(params.text_color())
                .into_any_element()
        } else {
            h_flex()
@@ -663,13 +659,10 @@ impl Item for ProjectDiagnosticsEditor {
                        h_flex()
                            .gap_1()
                            .child(Icon::new(IconName::XCircle).color(Color::Error))
-                            .child(Label::new(self.summary.error_count.to_string()).color(
-                                if params.selected {
-                                    Color::Default
-                                } else {
-                                    Color::Muted
-                                },
-                            )),
+                            .child(
+                                Label::new(self.summary.error_count.to_string())
+                                    .color(params.text_color()),
+                            ),
                    )
                })
                .when(self.summary.warning_count > 0, |then| {
@@ -677,13 +670,10 @@ impl Item for ProjectDiagnosticsEditor {
                        h_flex()
                            .gap_1()
                            .child(Icon::new(IconName::ExclamationTriangle).color(Color::Warning))
-                            .child(Label::new(self.summary.warning_count.to_string()).color(
-                                if params.selected {
-                                    Color::Default
-                                } else {
-                                    Color::Muted
-                                },
-                            )),
+                            .child(
+                                Label::new(self.summary.warning_count.to_string())
+                                    .color(params.text_color()),
+                            ),
                    )
                })
                .into_any_element()
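The tab_content changes above collapse the repeated `if params.selected { Color::Default } else { Color::Muted }` branches into a single `params.text_color()` call. Below is a minimal sketch of the kind of helper this implies, assuming `TabContentParams` carries a `selected` flag; the real struct and helper in the codebase may take more state into account.

    enum Color {
        Default,
        Muted,
    }

    struct TabContentParams {
        selected: bool,
    }

    impl TabContentParams {
        // Label color for a tab: full strength when selected, muted otherwise.
        fn text_color(&self) -> Color {
            if self.selected {
                Color::Default
            } else {
                Color::Muted
            }
        }
    }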
@@ -786,20 +776,6 @@ impl Item for ProjectDiagnosticsEditor {
        self.editor
            .update(cx, |editor, cx| editor.added_to_workspace(workspace, cx));
    }
-
-    fn serialized_item_kind() -> Option<&'static str> {
-        Some("diagnostics")
-    }
-
-    fn deserialize(
-        project: Model<Project>,
-        workspace: WeakView<Workspace>,
-        _workspace_id: workspace::WorkspaceId,
-        _item_id: workspace::ItemId,
-        cx: &mut ViewContext<Pane>,
-    ) -> Task<Result<View<Self>>> {
-        Task::ready(Ok(cx.new_view(|cx| Self::new(project, workspace, cx))))
-    }
}

const DIAGNOSTIC_HEADER: &'static str = "diagnostic header";