Compare commits

..

1 Commit

Author SHA1 Message Date
Thorsten Ball  1ac6a2f5c2  linux: add profling annotations & extend example  2024-07-10 13:08:51 +02:00
843 changed files with 24517 additions and 61661 deletions

View File

@@ -12,7 +12,3 @@ rustflags = ["-C", "link-arg=-fuse-ld=mold"]
[target.aarch64-unknown-linux-gnu]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=mold"]
# This cfg will reduce the size of `windows::core::Error` from 16 bytes to 4 bytes
[target.'cfg(target_os = "windows")']
rustflags = ["--cfg", "windows_slim_errors"]

View File

@@ -1,28 +0,0 @@
# .git-blame-ignore-revs
#
# This file consists of a list of commits that should be ignored for
# `git blame` purposes. This is useful for ignoring commits that only
# changed whitespace / indentation / formatting, but did not change
# the underlying syntax tree.
#
# GitHub will pick this up automatically for blame views:
# https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view
# To use this file locally, run:
# git blame --ignore-revs-file .git-blame-ignore-revs
# To always use this file by default, run:
# git config --local blame.ignoreRevsFile .git-blame-ignore-revs
# To disable this functionality, run:
# git config --local blame.ignoreRevsFile ""
# Comments are optional, but may provide helpful context.
# 2023-04-20 Set default tab_size for JSON to 2 and apply new formatting
# https://github.com/zed-industries/zed/pull/2394
eca93c124a488b4e538946cd2d313bd571aa2b86
# 2024-02-25 Format JSON files in assets/
# https://github.com/zed-industries/zed/pull/8405
ffdda588b41f7d9d270ffe76cab116f828ad545e
# 2024-07-05 Improved formatting of default keymaps (single line per bind)
# https://github.com/zed-industries/zed/pull/13887
813cc3f5e537372fc86720b5e71b6e1c815440ab

View File

@@ -27,8 +27,7 @@ body:
attributes:
label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
description: |
macOS: `~/Library/Logs/Zed/Zed.log`
Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME
Drag Zed.log into the text input below.
If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
value: |
<details><summary>Zed.log</summary><pre>

View File

@@ -10,7 +10,7 @@ runs:
cargo install cargo-nextest
- name: Install Node
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
uses: actions/setup-node@v4
with:
node-version: "18"

View File

@@ -19,7 +19,7 @@ jobs:
- test
steps:
- name: Checkout code
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v2
with:
ref: ${{ github.event.inputs.branch }}
ssh-key: ${{ secrets.ZED_BOT_DEPLOY_KEY }}

View File

@@ -30,7 +30,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
fetch-depth: 0
@@ -40,15 +40,10 @@ jobs:
- name: Check spelling
run: |
if ! cargo install --list | grep "typos-cli v$TYPOS_CLI_VERSION" > /dev/null; then
echo "Installing typos-cli@$TYPOS_CLI_VERSION..."
cargo install "typos-cli@$TYPOS_CLI_VERSION"
else
echo "typos-cli@$TYPOS_CLI_VERSION is already installed."
if ! which typos > /dev/null; then
cargo install typos-cli
fi
typos
env:
TYPOS_CLI_VERSION: "1.23.3"
- name: Run style checks
uses: ./.github/actions/check_style
@@ -90,7 +85,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
@@ -117,7 +112,7 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
@@ -137,18 +132,17 @@ jobs:
runs-on: hosted-windows-1
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
- name: Cache dependencies
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
uses: swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: cargo clippy
# Windows can't run shell scripts, so we need to use `cargo xtask`.
run: cargo xtask clippy
run: ./script/clippy
- name: Build Zed
run: cargo build -p zed
@@ -171,12 +165,12 @@ jobs:
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Install Node
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
uses: actions/setup-node@v4
with:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
# We need to fetch more than one commit so that `script/draft-release-notes`
# is able to diff between the current and previous tag.
@@ -231,34 +225,32 @@ jobs:
mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg
- name: Upload app bundle (universal) to workflow run if main branch or specific label
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
uses: actions/upload-artifact@v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg
path: target/release/Zed.dmg
- name: Upload app bundle (aarch64) to workflow run if main branch or specific label
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
uses: actions/upload-artifact@v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
- name: Upload app bundle (x86_64) to workflow run if main branch or specific label
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
uses: actions/upload-artifact@v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
- uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
- uses: softprops/action-gh-release@v1
name: Upload app bundle to release
if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: |
target/zed-remote-server-macos-x86_64.gz
target/zed-remote-server-macos-aarch64.gz
target/aarch64-apple-darwin/release/Zed-aarch64.dmg
target/x86_64-apple-darwin/release/Zed-x86_64.dmg
target/release/Zed.dmg
@@ -281,7 +273,7 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
@@ -319,20 +311,19 @@ jobs:
run: script/bundle-linux
- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
uses: actions/upload-artifact@v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz
- name: Upload app bundle to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
uses: softprops/action-gh-release@v1
if: ${{ env.RELEASE_CHANNEL == 'preview' }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: |
target/zed-remote-server-linux-x86_64.gz
target/release/zed-linux-x86_64.tar.gz
files: target/release/zed-linux-x86_64.tar.gz
body: ""
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -348,11 +339,11 @@ jobs:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
- name: "Setup jq"
uses: dcarbone/install-jq-action@8867ddb4788346d7c22b72ea2e2ffe4d514c7bcb # v2
uses: dcarbone/install-jq-action@v2
- name: Set up Clang
run: |
@@ -360,7 +351,7 @@ jobs:
sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH
- uses: rui314/setup-mold@2e332a0b602c2fc65d2d3995941b1b29a5f554a0 # v1
- uses: rui314/setup-mold@v1
with:
mold-version: 2.32.0
@@ -403,21 +394,19 @@ jobs:
run: script/bundle-linux
- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
uses: actions/upload-artifact@v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz
- name: Upload app bundle to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
uses: softprops/action-gh-release@v1
if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: |
target/zed-remote-server-linux-aarch64.gz
target/release/zed-linux-aarch64.tar.gz
files: target/release/zed-linux-aarch64.tar.gz
body: ""
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -14,14 +14,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
- uses: actions/checkout@v4
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
- uses: pnpm/action-setup@v3
with:
version: 9
- name: Setup Node
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
uses: actions/setup-node@v4
with:
node-version: "20"
cache: "pnpm"

View File

@@ -12,12 +12,12 @@ jobs:
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
- name: Setup mdBook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2
uses: peaceiris/actions-mdbook@v2
with:
mdbook-version: "0.4.37"
@@ -28,28 +28,28 @@ jobs:
mdbook build ./docs --dest-dir=../target/deploy/docs/
- name: Deploy Docs
uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3
uses: cloudflare/wrangler-action@v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: pages deploy target/deploy --project-name=docs
- name: Deploy Install
uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3
uses: cloudflare/wrangler-action@v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh
- name: Deploy Docs Workers
uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3
uses: cloudflare/wrangler-action@v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy .cloudflare/docs-proxy/src/worker.js
- name: Deploy Install Workers
uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3
uses: cloudflare/wrangler-action@v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}

View File

@@ -18,7 +18,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
fetch-depth: 0
@@ -37,7 +37,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
fetch-depth: 0
@@ -71,7 +71,7 @@ jobs:
run: doctl registry login
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
@@ -106,12 +106,10 @@ jobs:
export ZED_KUBE_NAMESPACE=production
export ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT=10
export ZED_API_LOAD_BALANCER_SIZE_UNIT=2
export ZED_LLM_LOAD_BALANCER_SIZE_UNIT=2
elif [[ $GITHUB_REF_NAME = "collab-staging" ]]; then
export ZED_KUBE_NAMESPACE=staging
export ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT=1
export ZED_API_LOAD_BALANCER_SIZE_UNIT=1
export ZED_LLM_LOAD_BALANCER_SIZE_UNIT=1
else
echo "cowardly refusing to deploy from an unknown branch"
exit 1
@@ -136,9 +134,3 @@ jobs:
envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
export ZED_SERVICE_NAME=llm
export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_LLM_LOAD_BALANCER_SIZE_UNIT
envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"

View File

@@ -1,24 +0,0 @@
name: Docs
on:
pull_request:
paths:
- "docs/**"
push:
branches:
- main
jobs:
check_formatting:
name: "Check formatting"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
with:
version: 9
- run: pnpm dlx prettier . --check
working-directory: ./docs

View File

@@ -16,12 +16,12 @@ jobs:
- ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
- name: Cache dependencies
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
uses: swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}

View File

@@ -23,12 +23,12 @@ jobs:
- randomized-tests
steps:
- name: Install Node
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
uses: actions/setup-node@v4
with:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false

View File

@@ -16,7 +16,7 @@ jobs:
fi
echo "::set-output name=URL::$URL"
- name: Get content
uses: 2428392/gh-truncate-string-action@67b1b814955634208b103cff064be3cb1c7a19be # v1.3.0
uses: 2428392/gh-truncate-string-action@v1.3.0
id: get-content
with:
stringToTruncate: |
@@ -26,7 +26,7 @@ jobs:
maxLength: 2000
truncationSymbol: "..."
- name: Discord Webhook Action
uses: tsickert/discord-webhook@c840d45a03a323fbc3f7507ac7769dbd91bfb164 # v5.3.0
uses: tsickert/discord-webhook@v5.3.0
with:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
content: ${{ steps.get-content.outputs.string }}

View File

@@ -23,7 +23,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
fetch-depth: 0
@@ -33,7 +33,6 @@ jobs:
- name: Run clippy
run: ./script/clippy
tests:
timeout-minutes: 60
name: Run tests
@@ -44,7 +43,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
@@ -69,12 +68,12 @@ jobs:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Install Node
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
uses: actions/setup-node@v4
with:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
@@ -94,9 +93,9 @@ jobs:
- name: Upload Zed Nightly
run: script/upload-nightly macos
bundle-linux-x86:
bundle-deb:
timeout-minutes: 60
name: Create a Linux *.tar.gz bundle for x86
name: Create a Linux *.tar.gz bundle
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
@@ -108,7 +107,7 @@ jobs:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
uses: actions/checkout@v4
with:
clean: false
@@ -122,56 +121,8 @@ jobs:
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
- name: Create Linux .tar.gz bundle
run: script/bundle-linux
- name: Upload Zed Nightly
run: script/upload-nightly linux-targz
bundle-linux-arm:
timeout-minutes: 60
name: Create a Linux *.tar.gz bundle for ARM
if: github.repository_owner == 'zed-industries'
runs-on:
- hosted-linux-arm-1
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false
- name: "Setup jq"
uses: dcarbone/install-jq-action@8867ddb4788346d7c22b72ea2e2ffe4d514c7bcb # v2
- name: Set up Clang
run: |
sudo apt-get update
sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH
- uses: rui314/setup-mold@2e332a0b602c2fc65d2d3995941b1b29a5f554a0 # v1
with:
mold-version: 2.32.0
- name: rustup
run: |
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100
- name: Set release channel to nightly
run: |
set -euo pipefail
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
- name: Generate license file
run: script/generate-licenses
- name: Create Linux .tar.gz bundle
run: script/bundle-linux

View File

@@ -8,8 +8,8 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.11"
architecture: "x64"

View File

@@ -8,8 +8,8 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.11"
architecture: "x64"

.gitignore (vendored): 3 changes
View File

@@ -29,6 +29,3 @@ DerivedData/
.vscode
.wrangler
.flatpak-builder
# Don't commit any secrets to the repo.
.env.secret.toml

View File

@@ -26,10 +26,6 @@
"tab_size": 2,
"formatter": "prettier"
},
"CSS": {
"tab_size": 2,
"formatter": "prettier"
},
"Rust": {
"tasks": {
"variables": {

Cargo.lock (generated): 2622 changes

File diff suppressed because it is too large.

View File

@@ -1,11 +1,11 @@
[workspace]
resolver = "2"
members = [
"crates/activity_indicator",
"crates/anthropic",
"crates/assets",
"crates/assistant",
"crates/assistant_slash_command",
"crates/assistant_tooling",
"crates/audio",
"crates/auto_update",
"crates/breadcrumbs",
@@ -43,14 +43,13 @@ members = [
"crates/gpui_macros",
"crates/headless",
"crates/html_to_markdown",
"crates/http_client",
"crates/http",
"crates/image_viewer",
"crates/indexed_docs",
"crates/inline_completion_button",
"crates/install_cli",
"crates/journal",
"crates/language",
"crates/language_model",
"crates/language_selector",
"crates/language_tools",
"crates/languages",
@@ -80,8 +79,6 @@ members = [
"crates/refineable",
"crates/refineable/derive_refineable",
"crates/release_channel",
"crates/remote",
"crates/remote_server",
"crates/repl",
"crates/rich_text",
"crates/rope",
@@ -89,9 +86,7 @@ members = [
"crates/search",
"crates/semantic_index",
"crates/semantic_version",
"crates/session",
"crates/settings",
"crates/settings_ui",
"crates/snippet",
"crates/snippet_provider",
"crates/sqlez",
@@ -124,10 +119,6 @@ members = [
"crates/zed",
"crates/zed_actions",
#
# Extensions
#
"extensions/astro",
"extensions/clojure",
"extensions/csharp",
@@ -146,7 +137,6 @@ members = [
"extensions/php",
"extensions/prisma",
"extensions/purescript",
"extensions/ruff",
"extensions/ruby",
"extensions/snippets",
"extensions/svelte",
@@ -157,25 +147,19 @@ members = [
"extensions/vue",
"extensions/zig",
#
# Tooling
#
"tooling/xtask",
]
default-members = ["crates/zed"]
resolver = "2"
[workspace.dependencies]
#
# Workspace member crates
#
activity_indicator = { path = "crates/activity_indicator" }
ai = { path = "crates/ai" }
anthropic = { path = "crates/anthropic" }
assets = { path = "crates/assets" }
assistant = { path = "crates/assistant" }
assistant_slash_command = { path = "crates/assistant_slash_command" }
assistant_tooling = { path = "crates/assistant_tooling" }
audio = { path = "crates/audio" }
auto_update = { path = "crates/auto_update" }
breadcrumbs = { path = "crates/breadcrumbs" }
@@ -209,17 +193,15 @@ go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
gpui = { path = "crates/gpui" }
gpui_macros = { path = "crates/gpui_macros" }
handlebars = "4.3"
headless = { path = "crates/headless" }
html_to_markdown = { path = "crates/html_to_markdown" }
http_client = { path = "crates/http_client" }
http = { path = "crates/http" }
image_viewer = { path = "crates/image_viewer" }
indexed_docs = { path = "crates/indexed_docs" }
inline_completion_button = { path = "crates/inline_completion_button" }
install_cli = { path = "crates/install_cli" }
journal = { path = "crates/journal" }
language = { path = "crates/language" }
language_model = { path = "crates/language_model" }
language_selector = { path = "crates/language_selector" }
language_tools = { path = "crates/language_tools" }
languages = { path = "crates/languages" }
@@ -248,10 +230,7 @@ project_symbols = { path = "crates/project_symbols" }
proto = { path = "crates/proto" }
quick_action_bar = { path = "crates/quick_action_bar" }
recent_projects = { path = "crates/recent_projects" }
refineable = { path = "crates/refineable" }
release_channel = { path = "crates/release_channel" }
remote = { path = "crates/remote" }
remote_server = { path = "crates/remote_server" }
repl = { path = "crates/repl" }
rich_text = { path = "crates/rich_text" }
rope = { path = "crates/rope" }
@@ -259,9 +238,7 @@ rpc = { path = "crates/rpc" }
search = { path = "crates/search" }
semantic_index = { path = "crates/semantic_index" }
semantic_version = { path = "crates/semantic_version" }
session = { path = "crates/session" }
settings = { path = "crates/settings" }
settings_ui = { path = "crates/settings_ui" }
snippet = { path = "crates/snippet" }
snippet_provider = { path = "crates/snippet_provider" }
sqlez = { path = "crates/sqlez" }
@@ -294,44 +271,37 @@ worktree = { path = "crates/worktree" }
zed = { path = "crates/zed" }
zed_actions = { path = "crates/zed_actions" }
#
# External crates
#
aho-corasick = "1.1"
alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "cacdb5bb3b72bad2c729227537979d95af75978f" }
any_vec = "0.14"
anyhow = "1.0.86"
ashpd = "0.9.1"
alacritty_terminal = "0.23"
any_vec = "0.13"
anyhow = "1.0.57"
ashpd = { git = "https://github.com/bilelmoussaoui/ashpd", rev = "29f2e1a" }
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
async-dispatcher = "0.1"
async-dispatcher = { version = "0.1" }
async-fs = "1.6"
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
async-recursion = "1.0.0"
async-tar = "0.4.2"
async-trait = "0.1"
async-tungstenite = "0.23"
async-watch = "0.3.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
base64 = "0.22"
bitflags = "2.6.0"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "ac25c77ed8d86c386a541c935ffe0a0f6024e701" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "ac25c77ed8d86c386a541c935ffe0a0f6024e701" }
blade-util = { git = "https://github.com/kvark/blade", rev = "ac25c77ed8d86c386a541c935ffe0a0f6024e701" }
cargo_metadata = "0.18"
base64 = "0.13"
bitflags = "2.4.2"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "21a56f780e21e4cb42c70a1dcf4b59842d1ad7f7" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "21a56f780e21e4cb42c70a1dcf4b59842d1ad7f7" }
blade-util = { git = "https://github.com/kvark/blade", rev = "21a56f780e21e4cb42c70a1dcf4b59842d1ad7f7" }
cap-std = "3.0"
cargo_toml = "0.20"
chrono = { version = "0.4", features = ["serde"] }
clap = { version = "4.4", features = ["derive"] }
clickhouse = "0.11.6"
clickhouse = { version = "0.11.6" }
cocoa = "0.25"
core-foundation = "0.9.3"
core-foundation = { version = "0.9.3" }
core-foundation-sys = "0.8.6"
ctor = "0.2.6"
dashmap = "6.0"
dashmap = "5.5.3"
derive_more = "0.99.17"
dirs = "4.0"
emojis = "0.6.1"
env_logger = "0.11"
env_logger = "0.9"
exec = "0.3.1"
fork = "0.1.23"
futures = "0.3"
@@ -341,22 +311,20 @@ git2 = { version = "0.19", default-features = false }
globset = "0.4"
heed = { version = "0.20.1", features = ["read-txn-no-tls"] }
hex = "0.4.3"
hyper = "0.14"
html5ever = "0.27.0"
ignore = "0.4.22"
image = "0.25.1"
indexmap = { version = "1.6.2", features = ["serde"] }
indoc = "2"
indoc = "1"
# We explicitly disable http2 support in isahc.
isahc = { version = "1.7.2", default-features = false, features = [
"text-decoding",
] }
itertools = "0.11.0"
jsonwebtoken = "9.3"
lazy_static = "1.4.0"
libc = "0.2"
linkify = "0.10.0"
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
markup5ever_rcdom = "0.3.0"
nanoid = "0.4"
nix = "0.28"
@@ -374,16 +342,15 @@ prost-build = "0.9"
prost-types = "0.9"
pulldown-cmark = { version = "0.10.0", default-features = false }
rand = "0.8.5"
refineable = { path = "./crates/refineable" }
regex = "1.5"
repair_json = "0.1.0"
rsa = "0.9.6"
runtimelib = { version = "0.14", default-features = false, features = [
runtimelib = { version = "0.12", default-features = false, features = [
"async-dispatcher-runtime",
] }
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
rustc-demangle = "0.1.23"
rust-embed = { version = "8.4", features = ["include-exclude"] }
schemars = {version = "0.8", features = ["impl_json_schema"]}
schemars = "0.8"
semver = "1.0"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
@@ -398,13 +365,10 @@ shellexpand = "2.1.0"
shlex = "1.3.0"
signal-hook = "0.3.17"
similar = "1.3"
simplelog = "0.12.2"
smallvec = { version = "1.6", features = ["union"] }
smol = "1.2"
strsim = "0.11"
strum = { version = "0.25.0", features = ["derive"] }
subtle = "2.5.0"
sys-locale = "0.3.1"
sysinfo = "0.30.7"
tempfile = "3.9.0"
thiserror = "1.0.29"
@@ -416,32 +380,32 @@ time = { version = "0.3", features = [
"serde-well-known",
"formatting",
] }
tiny_http = "0.8"
toml = "0.8"
tokio = { version = "1", features = ["full"] }
tower-http = "0.4.4"
tree-sitter = { version = "0.22", features = ["wasm"] }
tree-sitter-bash = "0.21"
tree-sitter-c = "0.21"
tree-sitter-cpp = "0.22"
tree-sitter-css = "0.21"
tree-sitter-elixir = "0.2"
tree-sitter = { version = "0.20", features = ["wasm"] }
tree-sitter-bash = "0.20.5"
tree-sitter-c = "0.20.1"
tree-sitter-cpp = "0.20.5"
tree-sitter-css = "0.20"
tree-sitter-elixir = "0.1.1"
tree-sitter-embedded-template = "0.20.0"
tree-sitter-go = "0.21"
tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "1f55029bacd0a6a11f6eb894c4312d429dcf735c", package = "tree-sitter-gomod" }
tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work", rev = "dcbabff454703c3a4bc98a23cf8778d4be46fd22" }
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "6dd0303acf7138dd2b9b432a229e16539581c701" }
tree-sitter-html = "0.20"
tree-sitter-jsdoc = "0.21"
tree-sitter-json = "0.21"
tree-sitter-md = { git = "https://github.com/zed-industries/tree-sitter-markdown", rev = "e3855e37f8f2c71aa7513c18a9c95fb7461b1b10" }
protols-tree-sitter-proto = "0.2"
tree-sitter-python = "0.21"
tree-sitter-regex = "0.21"
tree-sitter-ruby = "0.21"
tree-sitter-rust = "0.21"
tree-sitter-typescript = "0.21"
tree-sitter-yaml = "0.6"
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "b82ab803d887002a0af11f6ce63d72884580bf33" }
tree-sitter-gomod = "1.0.1"
tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work" }
rustc-demangle = "0.1.23"
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
tree-sitter-html = "0.19.0"
tree-sitter-jsdoc = { git = "https://github.com/tree-sitter/tree-sitter-jsdoc", rev = "6a6cf9e7341af32d8e2b2e24a37fbfebefc3dc55" }
tree-sitter-json = "0.20.2"
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
tree-sitter-proto = { git = "https://github.com/rewinfrey/tree-sitter-proto", rev = "36d54f288aee112f13a67b550ad32634d0c2cb52" }
tree-sitter-python = "0.20.2"
tree-sitter-regex = "0.20.0"
tree-sitter-ruby = "0.20.0"
tree-sitter-rust = "0.20.3"
tree-sitter-typescript = "0.20.5"
tree-sitter-yaml = "0.0.1"
unindent = "0.1.7"
unicase = "2.6"
unicode-segmentation = "1.10"
@@ -449,32 +413,20 @@ url = "2.2"
uuid = { version = "1.1.2", features = ["v4", "v5", "serde"] }
wasmparser = "0.201"
wasm-encoder = "0.201"
wasmtime = { version = "21.0.1", default-features = false, features = [
wasmtime = { version = "19.0.0", default-features = false, features = [
"async",
"demangle",
"runtime",
"cranelift",
"component-model",
] }
wasmtime-wasi = "21.0.1"
wasmtime-wasi = "19.0.0"
which = "6.0.0"
wit-component = "0.201"
[workspace.dependencies.async-stripe]
version = "0.37"
default-features = false
features = [
"runtime-tokio-hyper-rustls",
"billing",
"checkout",
"events",
# The features below are only enabled to get the `events` feature to build.
"chrono",
"connect",
]
sys-locale = "0.3.1"
[workspace.dependencies.windows]
version = "0.58"
version = "0.57"
features = [
"implement",
"Foundation_Numerics",
@@ -494,15 +446,16 @@ features = [
"Win32_Security",
"Win32_Security_Credentials",
"Win32_Storage_FileSystem",
"Win32_System_LibraryLoader",
"Win32_System_Com",
"Win32_System_Com_StructuredStorage",
"Win32_System_DataExchange",
"Win32_System_LibraryLoader",
"Win32_System_Memory",
"Win32_System_Ole",
"Win32_System_SystemInformation",
"Win32_System_SystemServices",
"Win32_System_Threading",
"Win32_System_Time",
"Win32_System_WinRT",
"Win32_UI_Controls",
"Win32_UI_HiDpi",
@@ -513,8 +466,9 @@ features = [
]
[patch.crates-io]
# Patch Tree-sitter for updated wasmtime.
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "7f4a57817d58a2f134fe863674acad6bbf007228" }
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "7b4894ba2ae81b988846676f54c0988d4027ef4f" }
# Workaround for a broken nightly build of gpui: See #7644 and revisit once 0.5.3 is released.
pathfinder_simd = { git = "https://github.com/servo/pathfinder.git", rev = "4968e819c0d9b015437ffc694511e175801a17c7" }
[profile.dev]
split-debuginfo = "unpacked"
@@ -564,9 +518,16 @@ single_range_in_vec_init = "allow"
# There are a bunch of rules currently failing in the `style` group, so
# allow all of those, for now.
style = { level = "allow", priority = -1 }
style = "allow"
# Individual rules that have violations in the codebase:
almost_complete_range = "allow"
arc_with_non_send_sync = "allow"
borrowed_box = "allow"
let_underscore_future = "allow"
map_entry = "allow"
non_canonical_partial_ord_impl = "allow"
reversed_empty_ranges = "allow"
type_complexity = "allow"
[workspace.metadata.cargo-machete]
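
In the clippy table above, the `style` group is set to `allow` with `priority = -1`; Cargo applies lower-priority entries first, so the individually listed rules keep their own effect. Each of those entries is the workspace-wide equivalent of a per-item attribute in source. A rough illustration (the function below is made up, only the attribute is the point):

// Illustrative only: silencing clippy::type_complexity at a single site,
// which is what the workspace-wide `type_complexity = "allow"` above does globally.
#[allow(clippy::type_complexity)]
fn handlers() -> Vec<Box<dyn Fn(Vec<(String, usize)>) -> Vec<String>>> {
    Vec::new()
}

fn main() {
    let _ = handlers();
}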

View File

@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2
FROM rust:1.80-bookworm as builder
FROM rust:1.79-bookworm as builder
WORKDIR app
COPY . .
@@ -27,7 +27,5 @@ RUN apt-get update; \
WORKDIR app
COPY --from=builder /app/collab /app/collab
COPY --from=builder /app/crates/collab/migrations /app/migrations
COPY --from=builder /app/crates/collab/migrations_llm /app/migrations_llm
ENV MIGRATIONS_PATH=/app/migrations
ENV LLM_DATABASE_MIGRATIONS_PATH=/app/migrations_llm
ENTRYPOINT ["/app/collab"]
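
The Dockerfile above hands the migration locations to the collab binary through MIGRATIONS_PATH and LLM_DATABASE_MIGRATIONS_PATH. A minimal sketch of reading such a variable on the Rust side (the fallback path and helper are assumptions for illustration, not the actual collab code):

use std::env;
use std::path::PathBuf;

// Hypothetical helper: resolve the migrations directory from the environment,
// falling back to the in-repo location used during local development.
fn migrations_path() -> PathBuf {
    env::var("MIGRATIONS_PATH")
        .map(PathBuf::from)
        .unwrap_or_else(|_| PathBuf::from("crates/collab/migrations"))
}

fn main() {
    println!("applying migrations from {}", migrations_path().display());
}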

View File

@@ -1,3 +1,3 @@
collab: RUST_LOG=${RUST_LOG:-info} cargo run --package=collab serve all
collab: RUST_LOG=${RUST_LOG:-info} cargo run --package=collab serve
livekit: livekit-server --dev
blob_store: ./script/run-local-minio

View File

@@ -1,4 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M3.43331 10.1846L6.66616 2.33334L9.89902 10.1846M3.43331 10.1846L1.9995 13.6667M3.43331 10.1846H9.89902M11.3328 13.6667L9.89902 10.1846" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M14.0613 13.647L9.34721 2.33334" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before: 459 B

View File

@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M14.8695 8.16639C14.8695 12.2258 12.0896 15.1147 7.98425 15.1147C4.04818 15.1147 0.869492 11.9361 0.869492 7.99999C0.869492 4.06393 4.04818 0.885239 7.98425 0.885239C9.90064 0.885239 11.5129 1.58811 12.7551 2.74712L10.8187 4.60901C8.28547 2.16475 3.57482 4.00081 3.57482 7.99999C3.57482 10.4816 5.5572 12.4926 7.98425 12.4926C10.8015 12.4926 11.8572 10.4729 12.0236 9.42581H7.98425V6.97868H14.7576C14.8236 7.34303 14.8695 7.69303 14.8695 8.16639Z" fill="black"/>
</svg>

Before: 575 B

View File

@@ -1,5 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2.84221 15C2.80954 14.65 2.89261 13.8016 3.48621 13.208C2.85154 12.48 1.93501 10.6712 3.34621 9.26C2.17021 7.496 2.84221 4.808 5.30621 4.668C5.41821 4.14533 6.24388 3.06077 8.05828 3.06077C9.87268 3.06077 10.5818 4.14533 10.6938 4.668C13.1578 4.808 13.8298 7.496 12.6538 9.26C14.065 10.6712 13.1485 12.48 12.5138 13.208C13.1074 13.8016 13.1905 14.65 13.1578 15M3.93421 4.864C3.74755 3.51066 3.6549 1 4.83021 1C5.64221 0.999997 5.83821 2.68 6.14621 3.632M12.0658 4.864C12.2525 3.51066 12.3451 1 11.1698 1C10.3578 0.999997 10.1618 2.68 9.85379 3.632M8.05828 7.6965C7.48995 7.72052 6.28918 7.74527 6.28918 9.10927C6.28918 10.4733 7.54678 10.5621 8.05828 10.5621C8.56978 10.5621 9.71082 10.4733 9.71082 9.10927C9.71082 7.74527 8.62661 7.72052 8.05828 7.6965Z" stroke="black" stroke-width="1.25"/>
<circle cx="4.98426" cy="7.76129" r="0.666553" fill="black"/>
<circle cx="0.666553" cy="0.666553" r="0.666553" transform="matrix(-1 0 0 1 11.6823 7.09473)" fill="black"/>
</svg>

Before: 1.1 KiB

View File

@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M14.0768 6.72994C14.3987 5.77663 14.2879 4.73232 13.7731 3.86519C12.9989 2.53519 11.4427 1.85094 9.92272 2.17294C9.24656 1.42132 8.2751 0.993879 7.25664 1C5.70301 0.996504 4.32452 1.9835 3.84655 3.44213C2.84849 3.64382 1.98699 4.26025 1.48286 5.13394C0.70294 6.46044 0.880738 8.13257 1.9227 9.27007C1.6008 10.2234 1.71164 11.2677 2.22642 12.1348C3.00057 13.4648 4.55686 14.1491 6.07679 13.8271C6.75251 14.5787 7.72441 15.0061 8.74287 14.9996C10.2974 15.0035 11.6763 14.0156 12.1543 12.5557C13.1524 12.354 14.0139 11.7376 14.518 10.8639C15.297 9.53738 15.1188 7.86657 14.0773 6.72907L14.0768 6.72994ZM8.74376 14.0848C8.12169 14.0856 7.51912 13.8708 7.0416 13.4775C7.06332 13.4661 7.10101 13.4456 7.1254 13.4307L9.95066 11.8207C10.0952 11.7398 10.1839 11.5879 10.183 11.4239V7.49382L11.377 8.17413C11.3899 8.18025 11.3983 8.1925 11.4001 8.2065V11.4611C11.3983 12.9083 10.2105 14.0817 8.74376 14.0848ZM3.03116 11.6772C2.71946 11.1461 2.60729 10.5235 2.71414 9.91932C2.73498 9.93157 2.77178 9.95388 2.79794 9.96875L5.6232 11.5788C5.76642 11.6614 5.94377 11.6614 6.08743 11.5788L9.53654 9.6135V10.9741C9.53742 10.9881 9.53077 11.0017 9.51969 11.0104L6.66383 12.6375C5.39175 13.3603 3.76719 12.9306 3.03161 11.6772H3.03116ZM2.2876 5.592C2.59797 5.06 3.08792 4.65313 3.67141 4.44182C3.67141 4.46588 3.67008 4.50832 3.67008 4.53807V7.7585C3.6692 7.92213 3.75787 8.07394 3.90198 8.15488L7.35108 10.1197L6.15704 10.8C6.14507 10.8079 6.12999 10.8092 6.11669 10.8035L3.26039 9.17513C1.99098 8.44975 1.55557 6.84719 2.28716 5.59244L2.2876 5.592ZM12.098 7.84469L8.64887 5.87944L9.84292 5.19957C9.85489 5.19169 9.86996 5.19038 9.88326 5.19607L12.7396 6.82313C14.0112 7.54807 14.447 9.15325 13.7124 10.408C13.4015 10.9391 12.912 11.346 12.329 11.5578V8.24107C12.3303 8.07744 12.2421 7.92607 12.0984 7.84469H12.098ZM13.2863 6.07982C13.2654 6.06713 13.2286 6.04525 13.2025 6.03038L10.3772 4.42038C10.234 4.33769 10.0566 4.33769 9.91297 4.42038L6.46386 6.38563V5.025C6.46298 5.011 6.46963 4.99744 6.48071 4.98869L9.33657 3.36294C10.6086 2.63888 12.235 3.06982 12.9683 4.32544C13.2783 4.85569 13.3905 5.4765 13.2854 6.07982H13.2863ZM5.81475 8.50488L4.62026 7.82457C4.6074 7.81844 4.59898 7.80619 4.59721 7.79219V4.53763C4.59809 3.08863 5.78947 1.91438 7.25797 1.91525C7.87916 1.91525 8.48039 2.1305 8.95792 2.5225C8.93619 2.53388 8.89894 2.55444 8.87412 2.56932L6.04885 4.17932C5.90431 4.26025 5.81563 4.41163 5.81652 4.57569L5.81475 8.504V8.50488ZM6.46342 7.125L7.99976 6.24957L9.53609 7.12457V8.875L7.99976 9.75L6.46342 8.875V7.125Z" fill="black"/>
</svg>

Before: 2.6 KiB

View File

@@ -1,10 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_1882_101)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M2.3125 1.875C2.07088 1.875 1.875 2.07088 1.875 2.3125V11.9375H1V2.3125C1 1.58763 1.58763 1 2.3125 1H14.0344C14.6191 1 14.9118 1.70688 14.4984 2.12029L7.27887 9.33984H9.3125V8.4375H10.1875V9.55859C10.1875 9.92103 9.89369 10.2148 9.53125 10.2148H6.40387L4.89996 11.7187H11.7187V6.25H12.5937V11.7187C12.5937 12.202 12.202 12.5937 11.7187 12.5937H4.02496L2.49371 14.125H13.6875C13.9291 14.125 14.125 13.9291 14.125 13.6875V4.0625H15V13.6875C15 14.4124 14.4124 15 13.6875 15H1.96561C1.38095 15 1.08816 14.2931 1.50157 13.8797L8.69379 6.6875H6.6875V7.5625H5.8125V6.46875C5.8125 6.10631 6.10631 5.8125 6.46875 5.8125H9.56879L11.1 4.28125H4.28125V9.75H3.40625V4.28125C3.40625 3.798 3.798 3.40625 4.28125 3.40625H11.975L13.5063 1.875H2.3125Z" fill="black"/>
</g>
<defs>
<clipPath id="clip0_1882_101">
<rect width="14" height="14" fill="white" transform="translate(1 1)"/>
</clipPath>
</defs>
</svg>

Before: 1.0 KiB

View File

@@ -1,3 +0,0 @@
<svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M3.49574 4.74787L5.99574 7.25214L8.49574 4.74787" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before: 246 B

View File

@@ -1 +1,10 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-download"><path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4"/><polyline points="7 10 12 15 17 10"/><line x1="12" x2="12" y1="15" y2="3"/></svg>
<svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_430_1270)">
<path d="M4.30957 0.857736V4.28922L2.35703 4.28788C2.10067 4.28788 1.86816 4.44057 1.76636 4.67656C1.66511 4.91229 1.71332 5.18633 1.88959 5.37304L5.53269 9.23312C5.77565 9.49028 6.22488 9.49028 6.46784 9.23312L10.1123 5.37277C10.2875 5.18794 10.3354 4.9147 10.2342 4.6763C10.1337 4.44057 9.90066 4.28788 9.66761 4.28788H7.73891L7.73891 0.857736C7.73891 0.383865 7.35504 2.35669e-07 6.88171 2.14979e-07L5.16731 1.4004e-07C4.66906 -0.000267757 4.30957 0.383865 4.30957 0.857736ZM11.1433 11.1187C11.1434 10.6687 10.7595 10.2856 10.2861 10.2856H1.71413C1.23972 10.2856 0.856659 10.6687 0.856659 11.1187C0.856659 11.6169 1.23972 12 1.71386 12H10.2861C10.7595 12 11.1433 11.6169 11.1433 11.1187Z" fill="white"/>
</g>
<defs>
<clipPath id="clip0_430_1270">
<rect width="12" height="12" fill="white"/>
</clipPath>
</defs>
</svg>

Before: 347 B, After: 954 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-eye"><path d="M2.062 12.348a1 1 0 0 1 0-.696 10.75 10.75 0 0 1 19.876 0 1 1 0 0 1 0 .696 10.75 10.75 0 0 1-19.876 0"/><circle cx="12" cy="12" r="3"/></svg>

Before: 358 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-file-code"><path d="M10 12.5 8 15l2 2.5"/><path d="m14 12.5 2 2.5-2 2.5"/><path d="M14 2v4a2 2 0 0 0 2 2h4"/><path d="M15 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V7z"/></svg>

Before: 388 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-file-text"><path d="M15 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V7Z"/><path d="M14 2v4a2 2 0 0 0 2 2h4"/><path d="M10 9H8"/><path d="M16 13H8"/><path d="M16 17H8"/></svg>

Before: 384 B

View File

@@ -1,13 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_62_95)">
<path d="M4.5 6C3.67157 6 3 5.32843 3 4.5C3 3.67157 3.67157 3 4.5 3C5.32843 3 6 3.67157 6 4.5C6 5.32843 5.32843 6 4.5 6Z" stroke="white" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M6.54433 13.4334C6.87775 13.5227 7.22046 13.3249 7.3098 12.9914C7.39914 12.658 7.20127 12.3153 6.86786 12.226L6.54433 13.4334ZM3.77426 6.86772L3.93603 6.26401L2.72862 5.94049L2.56686 6.54419L3.77426 6.86772ZM6.86786 12.226C4.53394 11.6006 3.14889 9.20163 3.77426 6.86772L2.56686 6.54419C1.76281 9.54494 3.54359 12.6293 6.54433 13.4334L6.86786 12.226Z" fill="white"/>
<path d="M11.5 13C10.6716 13 10 12.3284 10 11.5C10 10.6716 10.6716 10 11.5 10C12.3284 10 13 10.6716 13 11.5C13 12.3284 12.3284 13 11.5 13Z" stroke="white" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10.1875 4.21113C9.88852 4.03854 9.7861 3.65629 9.95869 3.35736C10.1313 3.05843 10.5135 2.95601 10.8125 3.12859L10.1875 4.21113ZM11.7888 10.1875C12.9969 8.09496 12.28 5.41925 10.1875 4.21113L10.8125 3.12859C13.5028 4.6819 14.4246 8.12209 12.8713 10.8125L11.7888 10.1875Z" fill="white"/>
</g>
<defs>
<clipPath id="clip0_62_95">
<rect width="16" height="16" fill="white" transform="matrix(-1 0 0 1 16 0)"/>
</clipPath>
</defs>
</svg>

Before: 1.3 KiB

View File

@@ -1,20 +0,0 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_39_129)">
<path d="M22.0209 11.9553C22.0059 10.0068 21.4219 8.10512 20.3408 6.48401" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10.1001 2.18C11.355 1.93537 12.1493 1.93674 13.5027 2.10594" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M21.8198 10.1C22.0644 11.3548 22.0644 12.6451 21.8198 13.9" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M20.2898 17.6C19.5716 18.6622 18.6548 19.5757 17.5898 20.29" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M13.9008 21.82C12.6459 22.0644 11.6432 22.1543 10.3883 21.91" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M2.18005 13.9C1.93543 12.6451 1.93543 11.3548 2.18005 10.1" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M3.70996 6.40002C4.42822 5.33775 5.34503 4.42433 6.40996 3.71002" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M12 13C12.5523 13 13 12.5523 13 12C13 11.4477 12.5523 11 12 11C11.4477 11 11 11.4477 11 12C11 12.5523 11.4477 13 12 13Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M19 7C20.1046 7 21 6.10457 21 5C21 3.89543 20.1046 3 19 3C17.8954 3 17 3.89543 17 5C17 6.10457 17.8954 7 19 7Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5 21C6.10457 21 7 20.1046 7 19C7 17.8954 6.10457 17 5 17C3.89543 17 3 17.8954 3 19C3 20.1046 3.89543 21 5 21Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M1.99072 12.0748C2.00804 14.0118 2.58758 15.9021 3.65891 17.516" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</g>
<defs>
<clipPath id="clip0_39_129">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>

Before: 2.0 KiB

View File

@@ -1,15 +0,0 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_32_70)">
<path d="M19 7C20.1046 7 21 6.10457 21 5C21 3.89543 20.1046 3 19 3C17.8954 3 17 3.89543 17 5C17 6.10457 17.8954 7 19 7Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5 21C6.10457 21 7 20.1046 7 19C7 17.8954 6.10457 17 5 17C3.89543 17 3 17.8954 3 19C3 20.1046 3.89543 21 5 21Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10.3999 21.9C12.3227 22.2159 14.2958 21.9632 16.0769 21.173C17.858 20.3827 19.3694 19.0893 20.4254 17.4517C21.4814 15.8142 22.036 13.9037 22.021 11.9553C22.006 10.0068 21.422 8.10512 20.3409 6.48401" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M13.4998 2.10002C11.5849 1.8076 9.62631 2.07763 7.86198 2.87732C6.09765 3.677 4.60356 4.9719 3.56126 6.60468C2.51896 8.23745 1.97332 10.1378 1.99063 12.0748C2.00795 14.0118 2.58749 15.9021 3.65882 17.516" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10 15V9" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M14 15V9" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</g>
<defs>
<clipPath id="clip0_32_70">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>

Before: 1.4 KiB

View File

@@ -1,14 +0,0 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_32_64)">
<path d="M19 7C20.1046 7 21 6.10457 21 5C21 3.89543 20.1046 3 19 3C17.8954 3 17 3.89543 17 5C17 6.10457 17.8954 7 19 7Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5 21C6.10457 21 7 20.1046 7 19C7 17.8954 6.10457 17 5 17C3.89543 17 3 17.8954 3 19C3 20.1046 3.89543 21 5 21Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10.3999 21.9C12.3227 22.2159 14.2958 21.9632 16.0769 21.173C17.858 20.3827 19.3694 19.0893 20.4254 17.4517C21.4814 15.8142 22.036 13.9037 22.021 11.9553C22.006 10.0068 21.422 8.10512 20.3409 6.48401" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M13.4998 2.10002C11.5849 1.8076 9.62631 2.07763 7.86198 2.87732C6.09765 3.677 4.60356 4.9719 3.56126 6.60468C2.51896 8.23745 1.97332 10.1378 1.99063 12.0748C2.00795 14.0118 2.58749 15.9021 3.65882 17.516" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10 8.56055C10 8.32095 10.267 8.17803 10.4664 8.31094L15.6256 11.7504C15.8037 11.8691 15.8037 12.1309 15.6256 12.2496L10.4664 15.6891C10.267 15.822 10 15.6791 10 15.4394V8.56055Z" fill="white" stroke="white" stroke-linecap="round" stroke-linejoin="round"/>
</g>
<defs>
<clipPath id="clip0_32_64">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>

Before: 1.5 KiB

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-route"><circle cx="6" cy="19" r="3"/><path d="M9 19h8.5a3.5 3.5 0 0 0 0-7h-11a3.5 3.5 0 0 1 0-7H15"/><circle cx="18" cy="5" r="3"/></svg>

Before: 340 B

View File

@@ -1,6 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M3 4H8" stroke="black" stroke-width="1.75" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M6 10L11 10" stroke="black" stroke-width="1.75" stroke-linecap="round" stroke-linejoin="round"/>
<circle cx="4" cy="10" r="1.875" stroke="black" stroke-width="1.75"/>
<circle cx="10" cy="4" r="1.875" stroke="black" stroke-width="1.75"/>
</svg>

Before: 450 B

View File

@@ -1,3 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6 6C5.69062 6.30938 4.56159 6.55977 3.51192 6.73263C3.27345 6.7719 3.27345 7.2281 3.51192 7.26737C4.56159 7.44023 5.69062 7.69062 6 8C6.30938 8.30938 6.55977 9.43841 6.73263 10.4881C6.7719 10.7266 7.2281 10.7266 7.26737 10.4881C7.44023 9.43841 7.69062 8.30938 8 8C8.30938 7.69062 9.43841 7.44023 10.4881 7.26737C10.7266 7.2281 10.7266 6.7719 10.4881 6.73263C9.43841 6.55977 8.30938 6.30938 8 6C7.69062 5.69062 7.44023 4.56159 7.26737 3.51192C7.2281 3.27345 6.7719 3.27345 6.73263 3.51192C6.55977 4.56159 6.30938 5.69062 6 6Z" stroke="black" stroke-width="1.25" stroke-linejoin="round"/>
</svg>

Before: 700 B

View File

@@ -1,3 +1,3 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4 9.8V4.2C4 4.08954 4.08954 4 4.2 4H9.8C9.91046 4 10 4.08954 10 4.2V9.8C10 9.91046 9.91046 10 9.8 10H4.2C4.08954 10 4 9.91046 4 9.8Z" stroke="#C56757" stroke-width="1.25" stroke-linejoin="round"/>
<svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M9.88889 1H2.11111C1.49746 1 1 1.49746 1 2.11111V9.88889C1 10.5025 1.49746 11 2.11111 11H9.88889C10.5025 11 11 10.5025 11 9.88889V2.11111C11 1.49746 10.5025 1 9.88889 1Z" stroke="#C56757" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before: 310 B, After: 369 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-text-search"><path d="M21 6H3"/><path d="M10 12H3"/><path d="M10 18H3"/><circle cx="17" cy="15" r="3"/><path d="m21 19-1.9-1.9"/></svg>

Before: 338 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-undo"><path d="M3 7v6h6"/><path d="M21 17a9 9 0 0 0-9-9 9 9 0 0 0-6 2.3L3 13"/></svg>

Before: 288 B

View File

@@ -2,11 +2,13 @@
// Standard Linux bindings
{
"bindings": {
"up": "menu::SelectPrev",
"shift-tab": "menu::SelectPrev",
"home": "menu::SelectFirst",
"pageup": "menu::SelectFirst",
"shift-pageup": "menu::SelectFirst",
"ctrl-p": "menu::SelectPrev",
"down": "menu::SelectNext",
"tab": "menu::SelectNext",
"end": "menu::SelectLast",
"pagedown": "menu::SelectLast",
@@ -17,6 +19,7 @@
"escape": "menu::Cancel",
"ctrl-escape": "menu::Cancel",
"ctrl-c": "menu::Cancel",
"shift-enter": "picker::UseSelectedQuery",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }],
"ctrl-alt-enter": ["picker::ConfirmInput", { "secondary": true }],
"ctrl-shift-w": "workspace::CloseWindow",
@@ -31,20 +34,6 @@
"f11": "zed::ToggleFullScreen"
}
},
{
"context": "Picker || menu",
"bindings": {
"up": "menu::SelectPrev",
"down": "menu::SelectNext"
}
},
{
"context": "Prompt",
"bindings": {
"left": "menu::SelectPrev",
"right": "menu::SelectNext"
}
},
{
"context": "Editor",
"bindings": {
@@ -52,6 +41,7 @@
"backspace": "editor::Backspace",
"shift-backspace": "editor::Backspace",
"delete": "editor::Delete",
"ctrl-d": "editor::Delete",
"tab": "editor::Tab",
"shift-tab": "editor::TabPrev",
"ctrl-k": "editor::CutToEndOfLine",
@@ -89,8 +79,10 @@
"shift-down": "editor::SelectDown",
"shift-left": "editor::SelectLeft",
"shift-right": "editor::SelectRight",
"ctrl-shift-left": "editor::SelectToPreviousWordStart", // cursorWordLeftSelect
"ctrl-shift-right": "editor::SelectToNextWordEnd", // cursorWordRightSelect
"ctrl-shift-left": "editor::SelectToPreviousWordStart",
"ctrl-shift-right": "editor::SelectToNextWordEnd",
"ctrl-shift-up": "editor::AddSelectionAbove",
"ctrl-shift-down": "editor::AddSelectionBelow",
"ctrl-shift-home": "editor::SelectToBeginning",
"ctrl-shift-end": "editor::SelectToEnd",
"ctrl-a": "editor::SelectAll",
@@ -108,7 +100,6 @@
"ctrl-k ctrl-r": "editor::RevertSelectedHunks",
"ctrl-'": "editor::ToggleHunkDiff",
"ctrl-\"": "editor::ExpandAllHunkDiffs",
"ctrl-i": "editor::ShowSignatureHelp",
"alt-g b": "editor::ToggleGitBlame"
}
},
@@ -117,8 +108,8 @@
"bindings": {
"enter": "editor::Newline",
"shift-enter": "editor::Newline",
"ctrl-enter": "editor::NewlineAbove",
"ctrl-shift-enter": "editor::NewlineBelow",
"ctrl-enter": "editor::NewlineAbove",
"alt-z": "editor::ToggleSoftWrap",
"ctrl-f": "buffer_search::Deploy",
"ctrl-h": ["buffer_search::Deploy", { "replace_enabled": true }],
@@ -133,7 +124,7 @@
"bindings": {
"alt-]": "editor::NextInlineCompletion",
"alt-[": "editor::PreviousInlineCompletion",
"ctrl-right": "editor::AcceptPartialInlineCompletion"
"alt-right": "editor::AcceptPartialInlineCompletion"
}
},
{
@@ -161,9 +152,7 @@
"bindings": {
"ctrl-g": "search::SelectNextMatch",
"ctrl-shift-g": "search::SelectPrevMatch",
"alt-m": "assistant::ToggleModelSelector",
"ctrl-k h": "assistant::DeployHistory",
"ctrl-k l": "assistant::DeployPromptLibrary"
"alt-m": "assistant::ToggleModelSelector"
}
},
{
@@ -218,7 +207,7 @@
}
},
{
"context": "ProjectSearchBar && in_replace > Editor",
"context": "ProjectSearchBar && in_replace",
"bindings": {
"enter": "search::ReplaceNext",
"ctrl-alt-enter": "search::ReplaceAll"
@@ -258,26 +247,33 @@
"ctrl-alt-shift-x": "search::ToggleRegex"
}
},
{
"context": "Terminal",
"bindings": {
"ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"]
}
},
// Bindings from VS Code
{
"context": "Editor",
"bindings": {
"ctrl-[": "editor::Outdent",
"ctrl-]": "editor::Indent",
"shift-alt-up": "editor::AddSelectionAbove", // Insert Cursor Above
"shift-alt-down": "editor::AddSelectionBelow", // Insert Cursor Below
"shift-alt-up": "editor::AddSelectionAbove",
"shift-alt-down": "editor::AddSelectionBelow",
"ctrl-shift-k": "editor::DeleteLine",
"alt-up": "editor::MoveLineUp",
"alt-down": "editor::MoveLineDown",
"ctrl-alt-shift-up": "editor::DuplicateLineUp",
"ctrl-alt-shift-down": "editor::DuplicateLineDown",
"alt-shift-right": "editor::SelectLargerSyntaxNode", // Expand Selection
"alt-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink Selection
"ctrl-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
"ctrl-f2": "editor::SelectAllMatches", // Select all occurrences of current word
"ctrl-shift-left": "editor::SelectToPreviousWordStart",
"ctrl-shift-right": "editor::SelectToNextWordEnd",
"ctrl-shift-up": "editor::SelectLargerSyntaxNode", //todo(linux) tmp keybinding
"ctrl-shift-down": "editor::SelectSmallerSyntaxNode", //todo(linux) tmp keybinding
"ctrl-d": ["editor::SelectNext", { "replace_newest": false }],
"ctrl-shift-down": ["editor::SelectNext", { "replace_newest": false }], // Add selection to Next Find Match
"ctrl-shift-up": ["editor::SelectPrevious", { "replace_newest": false }],
"ctrl-shift-l": "editor::SelectAllMatches",
"ctrl-shift-d": ["editor::SelectPrevious", { "replace_newest": false }],
"ctrl-k ctrl-d": ["editor::SelectNext", { "replace_newest": true }],
"ctrl-k ctrl-shift-d": ["editor::SelectPrevious", { "replace_newest": true }],
"ctrl-k ctrl-i": "editor::Hover",
@@ -330,7 +326,7 @@
"alt-9": ["pane::ActivateItem", 8],
"alt-0": "pane::ActivateLastItem",
"ctrl-alt--": "pane::GoBack",
"ctrl-alt-_": "pane::GoForward",
"ctrl-alt-shift--": "pane::GoForward",
"ctrl-shift-t": "pane::ReopenClosedItem",
"f3": "search::SelectNextMatch",
"shift-f3": "search::SelectPrevMatch",
@@ -401,7 +397,7 @@
"bindings": {
"ctrl-shift-k": "editor::DeleteLine",
"ctrl-shift-d": "editor::DuplicateLineDown",
"ctrl-shift-j": "editor::JoinLines",
"ctrl-j": "editor::JoinLines",
"ctrl-alt-backspace": "editor::DeleteToPreviousSubwordStart",
"ctrl-alt-h": "editor::DeleteToPreviousSubwordStart",
"ctrl-alt-delete": "editor::DeleteToNextSubwordEnd",
@@ -467,16 +463,12 @@
{
"bindings": {
"ctrl-alt-shift-f": "workspace::FollowNextCollaborator",
// TODO: Move this to a dock open action
"ctrl-shift-c": "collab_panel::ToggleFocus",
"ctrl-alt-i": "zed::DebugElements",
"ctrl-:": "editor::ToggleInlayHints"
}
},
{
"context": "!Terminal",
"bindings": {
"ctrl-shift-c": "collab_panel::ToggleFocus"
}
},
{
"context": "Editor && mode == full",
"bindings": {
@@ -488,13 +480,6 @@
"ctrl-enter": "assistant::InlineAssist"
}
},
{
"context": "Editor && jupyter && !ContextEditor",
"bindings": {
"ctrl-shift-enter": "repl::Run",
"ctrl-alt-enter": "repl::RunInPlace"
}
},
{
"context": "ContextEditor > Editor",
"bindings": {
@@ -581,13 +566,6 @@
"tab": "channel_modal::ToggleMode"
}
},
{
"context": "Picker > Editor",
"bindings": {
"tab": "picker::ConfirmCompletion",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }]
}
},
{
"context": "ChannelModal > Picker > Editor",
"bindings": {
@@ -611,14 +589,11 @@
"context": "Terminal",
"bindings": {
"ctrl-alt-space": "terminal::ShowCharacterPalette",
"ctrl-shift-c": "terminal::Copy",
"shift-ctrl-c": "terminal::Copy",
"ctrl-insert": "terminal::Copy",
// "ctrl-a": "editor::SelectAll", // conflicts with readline
"ctrl-shift-v": "terminal::Paste",
"shift-ctrl-v": "terminal::Paste",
"shift-insert": "terminal::Paste",
"ctrl-enter": "assistant::InlineAssist",
"ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
"up": ["terminal::SendKeystroke", "up"],
"pageup": ["terminal::SendKeystroke", "pageup"],
"down": ["terminal::SendKeystroke", "down"],

View File

@@ -83,7 +83,7 @@
"ctrl-n": "editor::MoveDown",
"ctrl-b": "editor::MoveLeft",
"ctrl-f": "editor::MoveRight",
"ctrl-l": "editor::ScrollCursorCenter",
"ctrl-l": "editor::NextScreen",
"alt-left": "editor::MoveToPreviousWordStart",
"alt-b": "editor::MoveToPreviousWordStart",
"alt-right": "editor::MoveToNextWordEnd",
@@ -102,9 +102,9 @@
"ctrl-shift-b": "editor::SelectLeft",
"shift-right": "editor::SelectRight",
"ctrl-shift-f": "editor::SelectRight",
"alt-shift-left": "editor::SelectToPreviousWordStart", // cursorWordLeftSelect
"alt-shift-left": "editor::SelectToPreviousWordStart",
"alt-shift-b": "editor::SelectToPreviousWordStart",
"alt-shift-right": "editor::SelectToNextWordEnd", // cursorWordRightSelect
"alt-shift-right": "editor::SelectToNextWordEnd",
"alt-shift-f": "editor::SelectToNextWordEnd",
"ctrl-shift-up": "editor::SelectToStartOfParagraph",
"ctrl-shift-down": "editor::SelectToEndOfParagraph",
@@ -126,10 +126,7 @@
"cmd-alt-z": "editor::RevertSelectedHunks",
"cmd-'": "editor::ToggleHunkDiff",
"cmd-\"": "editor::ExpandAllHunkDiffs",
"cmd-alt-g b": "editor::ToggleGitBlame",
"cmd-i": "editor::ShowSignatureHelp",
"ctrl-f12": "editor::GoToDeclaration",
"alt-ctrl-f12": "editor::GoToDeclarationSplit"
"cmd-alt-g b": "editor::ToggleGitBlame"
}
},
{
@@ -137,7 +134,6 @@
"bindings": {
"enter": "editor::Newline",
"shift-enter": "editor::Newline",
"cmd-enter": "editor::NewlineBelow",
"cmd-shift-enter": "editor::NewlineAbove",
"alt-z": "editor::ToggleSoftWrap",
"cmd-f": "buffer_search::Deploy",
@@ -149,12 +145,18 @@
"cmd-alt-e": "editor::SelectEnclosingSymbol"
}
},
{
"context": "Editor && mode == full && !jupyter",
"bindings": {
"cmd-enter": "editor::NewlineBelow"
}
},
{
"context": "Editor && mode == full && inline_completion",
"bindings": {
"alt-]": "editor::NextInlineCompletion",
"alt-[": "editor::PreviousInlineCompletion",
"cmd-right": "editor::AcceptPartialInlineCompletion"
"alt-right": "editor::AcceptPartialInlineCompletion"
}
},
{
@@ -177,21 +179,12 @@
"cmd-c": "markdown::Copy"
}
},
{
"context": "Editor && jupyter && !ContextEditor",
"bindings": {
"ctrl-shift-enter": "repl::Run",
"ctrl-alt-enter": "repl::RunInPlace"
}
},
{
"context": "AssistantPanel",
"bindings": {
"cmd-g": "search::SelectNextMatch",
"cmd-shift-g": "search::SelectPrevMatch",
"alt-m": "assistant::ToggleModelSelector",
"cmd-k h": "assistant::DeployHistory",
"cmd-k l": "assistant::DeployPromptLibrary"
"alt-m": "assistant::ToggleModelSelector"
}
},
{
@@ -261,7 +254,7 @@
}
},
{
"context": "ProjectSearchBar && in_replace > Editor",
"context": "ProjectSearchBar && in_replace",
"bindings": {
"enter": "search::ReplaceNext",
"cmd-enter": "search::ReplaceAll"
@@ -298,6 +291,7 @@
"alt-cmd-c": "search::ToggleCaseSensitive",
"alt-cmd-w": "search::ToggleWholeWord",
"alt-cmd-f": "project_search::ToggleFilters",
"alt-cmd-g": "search::ToggleRegex",
"alt-cmd-x": "search::ToggleRegex"
}
},
@@ -307,20 +301,19 @@
"bindings": {
"cmd-[": "editor::Outdent",
"cmd-]": "editor::Indent",
"cmd-alt-up": "editor::AddSelectionAbove", // Insert cursor above
"cmd-alt-up": "editor::AddSelectionAbove",
"cmd-ctrl-p": "editor::AddSelectionAbove",
"cmd-alt-down": "editor::AddSelectionBelow", // Insert cursor below
"cmd-alt-down": "editor::AddSelectionBelow",
"cmd-ctrl-n": "editor::AddSelectionBelow",
"cmd-shift-k": "editor::DeleteLine",
"alt-up": "editor::MoveLineUp",
"alt-down": "editor::MoveLineDown",
"alt-shift-up": "editor::DuplicateLineUp",
"alt-shift-down": "editor::DuplicateLineDown",
"ctrl-shift-right": "editor::SelectLargerSyntaxNode", // Expand Selection
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink Selection
"cmd-d": ["editor::SelectNext", { "replace_newest": false }], // Add selection to Next Find Match
"cmd-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
"cmd-f2": "editor::SelectAllMatches", // Select all occurrences of current word
"ctrl-shift-right": "editor::SelectLargerSyntaxNode",
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode",
"cmd-d": ["editor::SelectNext", { "replace_newest": false }],
"cmd-shift-l": "editor::SelectAllMatches",
"ctrl-cmd-d": ["editor::SelectPrevious", { "replace_newest": false }],
"cmd-k cmd-d": ["editor::SelectNext", { "replace_newest": true }],
"cmd-k ctrl-cmd-d": ["editor::SelectPrevious", { "replace_newest": true }],
@@ -575,6 +568,12 @@
"space": "project_panel::Open"
}
},
{
"context": "Editor && jupyter",
"bindings": {
"cmd-enter": "repl::Run"
}
},
{
"context": "CollabPanel && not_editing",
"bindings": {
@@ -594,14 +593,6 @@
"tab": "channel_modal::ToggleMode"
}
},
{
"context": "Picker > Editor",
"bindings": {
"tab": "picker::ConfirmCompletion",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }],
"cmd-alt-enter": ["picker::ConfirmInput", { "secondary": true }]
}
},
{
"context": "ChannelModal > Picker > Editor",
"bindings": {
@@ -621,13 +612,20 @@
"ctrl-backspace": "tab_switcher::CloseSelectedItem"
}
},
{
"context": "Picker",
"bindings": {
"f2": "picker::UseSelectedQuery",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }],
"cmd-alt-enter": ["picker::ConfirmInput", { "secondary": true }]
}
},
{
"context": "Terminal",
"bindings": {
"ctrl-cmd-space": "terminal::ShowCharacterPalette",
"cmd-c": "terminal::Copy",
"cmd-v": "terminal::Paste",
"cmd-a": "editor::SelectAll",
"cmd-k": "terminal::Clear",
"ctrl-enter": "assistant::InlineAssist",
// Some nice conveniences

View File

@@ -1,21 +0,0 @@
// Zed keymap
//
// For information on binding keys, see the Zed
// documentation: https://zed.dev/docs/key-bindings
//
// To see the default key bindings run `zed: open default keymap`
// from the command palette.
[
{
"context": "Workspace",
"bindings": {
// "shift shift": "file_finder::Toggle"
}
},
{
"context": "Editor",
"bindings": {
// "j k": ["workspace::SendKeystrokes", "escape"]
}
}
]

View File

@@ -25,7 +25,6 @@
"ctrl-shift-d": "editor::DuplicateLineDown", // editor:duplicate-lines
"ctrl-up": "editor::MoveLineUp", // editor:move-line-up
"ctrl-down": "editor::MoveLineDown", // editor:move-line-down
"ctrl-\\": "workspace::ToggleLeftDock", // tree-view:toggle
"ctrl-shift-m": "markdown::OpenPreviewToTheSide" // markdown-preview:toggle
}
},

View File

@@ -13,7 +13,6 @@
"ctrl-shift-j": "editor::JoinLines",
"ctrl-d": "editor::DuplicateLineDown",
"ctrl-y": "editor::DeleteLine",
"ctrl-m": "editor::ScrollCursorCenter",
"ctrl-pagedown": "editor::MovePageDown",
"ctrl-pageup": "editor::MovePageUp",
// "ctrl-alt-shift-b": "editor::SelectToPreviousWordStart",

View File

@@ -3,17 +3,17 @@
"bindings": {
"ctrl-shift-[": "pane::ActivatePrevItem",
"ctrl-shift-]": "pane::ActivateNextItem",
"ctrl-pageup": "pane::ActivatePrevItem",
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivatePrevItem"
"ctrl-pagedown": "pane::ActivatePrevItem",
"ctrl-pageup": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivateNextItem",
"ctrl-tab": "pane::ActivatePrevItem"
}
},
{
"context": "Editor",
"bindings": {
"ctrl-shift-up": "editor::MoveLineUp",
"ctrl-shift-down": "editor::MoveLineDown",
"ctrl-shift-up": "editor::AddSelectionAbove",
"ctrl-shift-down": "editor::AddSelectionBelow",
"ctrl-shift-m": "editor::SelectLargerSyntaxNode",
"ctrl-shift-l": "editor::SplitSelectionIntoLines",
"ctrl-shift-a": "editor::SelectLargerSyntaxNode",
@@ -24,8 +24,6 @@
"ctrl-shift-f12": "editor::FindAllReferences",
"ctrl-.": "editor::GoToHunk",
"ctrl-,": "editor::GoToPrevHunk",
"ctrl-k ctrl-u": "editor::ConvertToUpperCase",
"ctrl-k ctrl-l": "editor::ConvertToLowerCase",
"shift-alt-m": "markdown::OpenPreviewToTheSide",
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
"ctrl-delete": "editor::DeleteToNextWordEnd"

View File

@@ -26,7 +26,6 @@
"cmd-shift-d": "editor::DuplicateLineDown",
"ctrl-cmd-up": "editor::MoveLineUp",
"ctrl-cmd-down": "editor::MoveLineDown",
"cmd-\\": "workspace::ToggleLeftDock",
"ctrl-shift-m": "markdown::OpenPreviewToTheSide"
}
},

View File

@@ -3,10 +3,10 @@
"bindings": {
"cmd-shift-[": "pane::ActivatePrevItem",
"cmd-shift-]": "pane::ActivateNextItem",
"ctrl-pageup": "pane::ActivatePrevItem",
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivatePrevItem"
"ctrl-pagedown": "pane::ActivatePrevItem",
"ctrl-pageup": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivateNextItem",
"ctrl-tab": "pane::ActivatePrevItem"
}
},
{
@@ -14,8 +14,6 @@
"bindings": {
"ctrl-shift-up": "editor::AddSelectionAbove",
"ctrl-shift-down": "editor::AddSelectionBelow",
"cmd-ctrl-up": "editor::MoveLineUp",
"cmd-ctrl-down": "editor::MoveLineDown",
"cmd-shift-space": "editor::SelectAll",
"ctrl-shift-m": "editor::SelectLargerSyntaxNode",
"cmd-shift-l": "editor::SplitSelectionIntoLines",
@@ -29,7 +27,6 @@
"ctrl-,": "editor::GoToPrevHunk",
"cmd-k cmd-u": "editor::ConvertToUpperCase",
"cmd-k cmd-l": "editor::ConvertToLowerCase",
"cmd-shift-j": "editor::JoinLines",
"shift-alt-m": "markdown::OpenPreviewToTheSide",
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
"ctrl-delete": "editor::DeleteToNextWordEnd"

View File

@@ -1,6 +1,12 @@
[
{
"context": "VimControl && !menu",
"context": "ProjectPanel || Editor",
"bindings": {
"ctrl-6": "pane::AlternateFile"
}
},
{
"context": "Editor && VimControl && !VimWaiting && !menu",
"bindings": {
"i": ["vim::PushOperator", { "Object": { "around": false } }],
"a": ["vim::PushOperator", { "Object": { "around": true } }],
@@ -10,11 +16,7 @@
"backspace": "vim::Backspace",
"j": "vim::Down",
"down": "vim::Down",
"ctrl-j": "vim::Down",
"enter": "vim::NextLineStart",
"ctrl-m": "vim::NextLineStart",
"+": "vim::NextLineStart",
"-": "vim::PreviousLineStart",
"tab": "vim::Tab",
"shift-tab": "vim::Tab",
"k": "vim::Up",
@@ -89,9 +91,8 @@
"g t": "pane::ActivateNextItem",
"g shift-t": "pane::ActivatePrevItem",
"g d": "editor::GoToDefinition",
"g shift-d": "editor::GoToDeclaration",
"g y": "editor::GoToTypeDefinition",
"g shift-i": "editor::GoToImplementation",
"g shift-d": "editor::GoToTypeDefinition",
"g cmd-d": "editor::GoToImplementation",
"g x": "editor::OpenUrl",
"g n": "vim::SelectNextMatch",
"g shift-n": "vim::SelectPreviousMatch",
@@ -196,20 +197,20 @@
"ctrl-w g shift-d": "editor::GoToTypeDefinitionSplit",
"ctrl-w space": "editor::OpenExcerptsSplit",
"ctrl-w g space": "editor::OpenExcerptsSplit",
"ctrl-6": "pane::AlternateFile"
"-": "pane::RevealInProjectPanel"
}
},
{
"context": "VimControl && VimCount",
"bindings": {
"0": ["vim::Number", 0]
}
},
{
"context": "vim_mode == normal",
// escape is in its own section so that it cancels a pending count.
"context": "Editor && vim_mode == normal && vim_operator == none && !VimWaiting",
"bindings": {
"escape": "editor::Cancel",
"ctrl-[": "editor::Cancel",
"ctrl-[": "editor::Cancel"
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == none && !VimWaiting",
"bindings": {
".": "vim::Repeat",
"c": ["vim::PushOperator", "Change"],
"shift-c": "vim::ChangeToEndOfLine",
@@ -217,7 +218,7 @@
"shift-d": "vim::DeleteToEndOfLine",
"shift-j": "vim::JoinLines",
"y": ["vim::PushOperator", "Yank"],
"shift-y": "vim::YankToEndOfLine",
"shift-y": "vim::YankLine",
"i": "vim::InsertBefore",
"shift-i": "vim::InsertFirstNonWhitespace",
"a": "vim::InsertAfter",
@@ -253,12 +254,127 @@
"] d": "editor::GoToDiagnostic",
"[ d": "editor::GoToPrevDiagnostic",
"] c": "editor::GoToHunk",
"[ c": "editor::GoToPrevHunk",
"g c": ["vim::PushOperator", "ToggleComments"]
"[ c": "editor::GoToPrevHunk"
}
},
{
"context": "vim_mode == visual",
"context": "Editor && vim_mode == visual && vim_operator == none && !VimWaiting",
"bindings": {
"\"": ["vim::PushOperator", "Register"],
// tree-sitter related commands
"[ x": "editor::SelectLargerSyntaxNode",
"] x": "editor::SelectSmallerSyntaxNode"
}
},
{
"context": "Editor && VimCount && vim_mode != insert",
"bindings": {
"0": ["vim::Number", 0]
}
},
{
"context": "Editor && vim_operator == c",
"bindings": {
"c": "vim::CurrentLine",
"d": "editor::Rename" // zed specific
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == c",
"bindings": {
"s": ["vim::PushOperator", { "ChangeSurrounds": {} }]
}
},
{
"context": "Editor && vim_operator == d",
"bindings": {
"d": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_operator == gu",
"bindings": {
"g u": "vim::CurrentLine",
"u": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_operator == gU",
"bindings": {
"g shift-u": "vim::CurrentLine",
"shift-u": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_operator == g~",
"bindings": {
"g ~": "vim::CurrentLine",
"~": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == d",
"bindings": {
"s": ["vim::PushOperator", "DeleteSurrounds"]
}
},
{
"context": "Editor && vim_operator == y",
"bindings": {
"y": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == y",
"bindings": {
"s": ["vim::PushOperator", { "AddSurrounds": {} }]
}
},
{
"context": "Editor && vim_operator == ys",
"bindings": {
"s": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_operator == >",
"bindings": {
">": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_operator == <",
"bindings": {
"<": "vim::CurrentLine"
}
},
{
"context": "Editor && VimObject",
"bindings": {
"w": "vim::Word",
"shift-w": ["vim::Word", { "ignorePunctuation": true }],
"t": "vim::Tag",
"s": "vim::Sentence",
"p": "vim::Paragraph",
"'": "vim::Quotes",
"`": "vim::BackQuotes",
"\"": "vim::DoubleQuotes",
"|": "vim::VerticalBars",
"(": "vim::Parentheses",
")": "vim::Parentheses",
"b": "vim::Parentheses",
"[": "vim::SquareBrackets",
"]": "vim::SquareBrackets",
"{": "vim::CurlyBrackets",
"}": "vim::CurlyBrackets",
"shift-b": "vim::CurlyBrackets",
"<": "vim::AngleBrackets",
">": "vim::AngleBrackets",
"a": "vim::Argument"
}
},
{
"context": "Editor && vim_mode == visual && !VimWaiting && !VimObject",
"bindings": {
"u": "vim::ConvertToLowerCase",
"U": "vim::ConvertToUpperCase",
@@ -293,16 +409,23 @@
">": "vim::Indent",
"<": "vim::Outdent",
"i": ["vim::PushOperator", { "Object": { "around": false } }],
"a": ["vim::PushOperator", { "Object": { "around": true } }],
"g c": "vim::ToggleComments",
"\"": ["vim::PushOperator", "Register"],
// tree-sitter related commands
"[ x": "editor::SelectLargerSyntaxNode",
"] x": "editor::SelectSmallerSyntaxNode"
"a": ["vim::PushOperator", { "Object": { "around": true } }]
}
},
{
"context": "vim_mode == insert",
"context": "Editor && vim_mode == normal && !VimWaiting",
"bindings": {
"g c c": "vim::ToggleComments"
}
},
{
"context": "Editor && vim_mode == visual",
"bindings": {
"g c": "vim::ToggleComments"
}
},
{
"context": "Editor && vim_mode == insert",
"bindings": {
"escape": "vim::NormalBefore",
"ctrl-c": "vim::NormalBefore",
@@ -317,131 +440,34 @@
"ctrl-u": "editor::DeleteToBeginningOfLine",
"ctrl-t": "vim::Indent",
"ctrl-d": "vim::Outdent",
"ctrl-k": ["vim::PushOperator", { "Digraph": {} }],
"ctrl-r": ["vim::PushOperator", "Register"]
}
},
{
"context": "vim_mode == replace",
"context": "Editor && vim_mode == replace",
"bindings": {
"escape": "vim::NormalBefore",
"ctrl-c": "vim::NormalBefore",
"ctrl-[": "vim::NormalBefore",
"ctrl-k": ["vim::PushOperator", { "Digraph": {} }],
"backspace": "vim::UndoReplace",
"tab": "vim::Tab",
"enter": "vim::Enter"
"enter": "vim::Enter",
"backspace": "vim::UndoReplace"
}
},
{
"context": "vim_mode == waiting",
"context": "Editor && vim_mode != replace && VimWaiting",
"bindings": {
"tab": "vim::Tab",
"enter": "vim::Enter",
"escape": "vim::ClearOperators",
"ctrl-c": "vim::ClearOperators",
"ctrl-[": "vim::ClearOperators",
"ctrl-k": ["vim::PushOperator", { "Digraph": {} }]
"escape": ["vim::SwitchMode", "Normal"],
"ctrl-[": ["vim::SwitchMode", "Normal"]
}
},
{
"context": "vim_mode == operator",
"context": "Editor && vim_mode == insert && VimWaiting",
"bindings": {
"escape": "vim::ClearOperators",
"ctrl-c": "vim::ClearOperators",
"ctrl-[": "vim::ClearOperators"
}
},
{
"context": "vim_operator == a || vim_operator == i || vim_operator == cs",
"bindings": {
"w": "vim::Word",
"shift-w": ["vim::Word", { "ignorePunctuation": true }],
"t": "vim::Tag",
"s": "vim::Sentence",
"p": "vim::Paragraph",
"'": "vim::Quotes",
"`": "vim::BackQuotes",
"\"": "vim::DoubleQuotes",
"|": "vim::VerticalBars",
"(": "vim::Parentheses",
")": "vim::Parentheses",
"b": "vim::Parentheses",
"[": "vim::SquareBrackets",
"]": "vim::SquareBrackets",
"{": "vim::CurlyBrackets",
"}": "vim::CurlyBrackets",
"shift-b": "vim::CurlyBrackets",
"<": "vim::AngleBrackets",
">": "vim::AngleBrackets",
"a": "vim::Argument"
}
},
{
"context": "vim_operator == c",
"bindings": {
"c": "vim::CurrentLine",
"d": "editor::Rename", // zed specific
"s": ["vim::PushOperator", { "ChangeSurrounds": {} }]
}
},
{
"context": "vim_operator == d",
"bindings": {
"d": "vim::CurrentLine",
"s": ["vim::PushOperator", "DeleteSurrounds"]
}
},
{
"context": "vim_operator == gu",
"bindings": {
"g u": "vim::CurrentLine",
"u": "vim::CurrentLine"
}
},
{
"context": "vim_operator == gU",
"bindings": {
"g shift-u": "vim::CurrentLine",
"shift-u": "vim::CurrentLine"
}
},
{
"context": "vim_operator == g~",
"bindings": {
"g ~": "vim::CurrentLine",
"~": "vim::CurrentLine"
}
},
{
"context": "vim_operator == y",
"bindings": {
"y": "vim::CurrentLine",
"s": ["vim::PushOperator", { "AddSurrounds": {} }]
}
},
{
"context": "vim_operator == ys",
"bindings": {
"s": "vim::CurrentLine"
}
},
{
"context": "vim_operator == >",
"bindings": {
">": "vim::CurrentLine"
}
},
{
"context": "vim_operator == <",
"bindings": {
"<": "vim::CurrentLine"
}
},
{
"context": "vim_operator == gc",
"bindings": {
"c": "vim::CurrentLine"
"escape": "vim::NormalBefore",
"ctrl-[": "vim::NormalBefore"
}
},
{
@@ -481,8 +507,7 @@
"x": "project_panel::RevealInFileManager",
"shift-g": "menu::SelectLast",
"g g": "menu::SelectFirst",
"-": "project_panel::SelectParent",
"ctrl-6": "pane::AlternateFile"
"-": "project_panel::SelectParent"
}
},
{

View File

@@ -1,61 +0,0 @@
{{#if language_name}}
Here's a file of {{language_name}} that I'm going to ask you to make an edit to.
{{else}}
Here's a file of text that I'm going to ask you to make an edit to.
{{/if}}
{{#if is_insert}}
The point you'll need to insert at is marked with <insert_here></insert_here>.
{{else}}
The section you'll need to rewrite is marked with <rewrite_this></rewrite_this> tags.
{{/if}}
<document>
{{{document_content}}}
</document>
{{#if is_truncated}}
The context around the relevant section has been truncated (possibly in the middle of a line) for brevity.
{{/if}}
{{#if is_insert}}
You can't replace {{content_type}}, your answer will be inserted in place of the `<insert_here></insert_here>` tags. Don't include the insert_here tags in your output.
Generate {{content_type}} based on the following prompt:
<prompt>
{{{user_prompt}}}
</prompt>
Match the indentation in the original file in the inserted {{content_type}}, don't include any indentation on blank lines.
Immediately start with the following format with no remarks:
```
{{INSERTED_CODE}}
```
{{else}}
Edit the section of {{content_type}} in <rewrite_this></rewrite_this> tags based on the following prompt:
<prompt>
{{{user_prompt}}}
</prompt>
{{#if rewrite_section}}
And here's the section to rewrite based on that prompt again for reference:
<rewrite_this>
{{{rewrite_section}}}
</rewrite_this>
{{/if}}
Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {{content_type}} will be preserved.
Start at the indentation level in the original file in the rewritten {{content_type}}. Don't stop until you've rewritten the entire section, even if you have no more changes to make, always write out the whole section with no unnecessary elisions.
Immediately start with the following format with no remarks:
```
{{REWRITTEN_CODE}}
```
{{/if}}
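
The removed template above is plain Handlebars, so its branches can be exercised outside the editor. Below is a minimal, hypothetical sketch (not code from this repository) that renders a trimmed-down stand-in for the rewrite branch with the `handlebars` and `serde_json` crates; the shortened template string and the sample values are assumptions for illustration only.

```rust
use handlebars::Handlebars;
use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Trimmed-down stand-in for the removed template; not the full prompt text.
    let template = "\
{{#if language_name}}Here's a file of {{language_name}} to edit.{{/if}}
<document>
{{{document_content}}}
</document>
Edit the section in <rewrite_this></rewrite_this> tags based on:
<prompt>
{{{user_prompt}}}
</prompt>";

    // Sample values; a real caller would fill these from editor state.
    let data = json!({
        "language_name": "Rust",
        "document_content": "fn main() {\n    <rewrite_this>println!(\"hi\");</rewrite_this>\n}",
        "user_prompt": "Print a greeting with the user's name",
    });

    // Render the template with the sample data and print the resulting prompt.
    let rendered = Handlebars::new().render_template(template, &data)?;
    println!("{rendered}");
    Ok(())
}
```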

View File

@@ -1,87 +0,0 @@
Guide the user through code changes in numbered steps that focus on individual functions, type definitions, etc.
Surround each distinct step in a <step></step> XML tag.
Here's an example of a user-assistant dialog with step numbers and a follow-up question in Rust:
1. User: I want to create a User struct with name and age fields, and a method to print the user's information. Can you help me with that?
A: Certainly! Let's create a User struct with the specified fields and a method to print the information:
<step>Define the User struct
```rust
struct User {
name: String,
age: u32,
}
```
</step>
<step>Implement methods for the User struct
```rust
impl User {
fn new(name: String, age: u32) -> Self {
User { name, age }
}
fn print_info(&self) {
println!("Name: {}, Age: {}", self.name, self.age);
}
}
```
</step>
This code defines a User struct and implements two methods:
- `new`: a constructor to create a new User
- `print_info`: a method to print the user's information
2. User: Great! Now, how can I create a User instance and call the print_info method?
A: You can create a User instance and call the print_info method like this:
<step>Create a User instance and call print_info
```rust
fn main() {
let user = User::new(String::from("Alice"), 30);
user.print_info();
}
```
</step>
This will output: `Name: Alice, Age: 30`
3. User: Can you add an email field to the User struct and update the print_info method to include it?
A: Certainly! Let's modify the User struct and update the print_info method:
<step>Add email field to User struct
```rust
struct User {
name: String,
age: u32,
email: String,
}
```
</step>
<step>Update the new method and print_info method
```rust
impl User {
fn new(name: String, age: u32, email: String) -> Self {
User { name, age, email }
}
fn print_info(&self) {
println!("Name: {}, Age: {}, Email: {}", self.name, self.age, self.email);
}
}
```
</step>
<step>Update the main function
```rust
fn main() {
let user = User::new(String::from("Alice"), 30, String::from("alice@example.com"));
user.print_info();
}
```
</step>
This will now output: `Name: Alice, Age: 30, Email: alice@example.com`
The changes include:
1. Adding the `email` field to the User struct
2. Updating the `new` method to accept an email parameter
3. Modifying the `print_info` method to include the email
4. Updating the main function to provide an email when creating a User instance

View File

@@ -1,413 +0,0 @@
Your task is to map a step from the conversation above to suggestions on symbols inside the provided source files.
Guidelines:
- There's no need to describe *what* to do, just *where* to do it.
- If creating a file, assume any subsequent updates are included at the time of creation.
- Don't create and then update a file.
- We'll create it in one shot.
- Prefer updating symbols lower in the syntax tree if possible.
- Never include suggestions on a parent symbol and one of its children in the same suggestions block.
- Never nest an operation with another operation or include CDATA or other content. All suggestions are leaf nodes.
- Include a description attribute for each operation with a brief, one-line description of the change to perform.
- Descriptions are required for all suggestions except delete.
- When generating multiple suggestions, ensure the descriptions are specific to each individual operation.
- Avoid referring to the location in the description. Focus on the change to be made, not the location where it's made. That's implicit with the symbol you provide.
- Don't generate multiple suggestions at the same location. Instead, combine them together in a single operation with a succinct combined description.
Example 1:
User:
```rs src/rectangle.rs
struct Rectangle {
width: f64,
height: f64,
}
impl Rectangle {
fn new(width: f64, height: f64) -> Self {
Rectangle { width, height }
}
}
```
<step>Add new methods 'calculate_area' and 'calculate_perimeter' to the Rectangle struct</step>
<step>Implement the 'Display' trait for the Rectangle struct</step>
What are the suggestions for the step: <step>Add a new method 'calculate_area' to the Rectangle struct</step>
A (wrong):
{
"title": "Add Rectangle methods",
"suggestions": [
{
"kind": "AppendChild",
"path": "src/shapes.rs",
"symbol": "impl Rectangle",
"description": "Add calculate_area method"
},
{
"kind": "AppendChild",
"path": "src/shapes.rs",
"symbol": "impl Rectangle",
"description": "Add calculate_perimeter method"
}
]
}
This demonstrates what NOT to do. NEVER append multiple children at the same location.
A (corrected):
{
"title": "Add Rectangle methods",
"suggestions": [
{
"kind": "AppendChild",
"path": "src/shapes.rs",
"symbol": "impl Rectangle",
"description": "Add calculate area and perimeter methods"
}
]
}
User:
What are the suggestions for the step: <step>Implement the 'Display' trait for the Rectangle struct</step>
A:
{
"title": "Implement Display for Rectangle",
"suggestions": [
{
"kind": "InsertSiblingAfter",
"path": "src/shapes.rs",
"symbol": "impl Rectangle",
"description": "Implement Display trait for Rectangle"
}
]
}
Example 2:
User:
```rs src/user.rs
struct User {
pub name: String,
age: u32,
email: String,
}
impl User {
fn new(name: String, age: u32, email: String) -> Self {
User { name, age, email }
}
pub fn print_info(&self) {
println!("Name: {}, Age: {}, Email: {}", self.name, self.age, self.email);
}
}
```
<step>Update the 'print_info' method to use formatted output</step>
<step>Remove the 'email' field from the User struct</step>
What are the suggestions for the step: <step>Update the 'print_info' method to use formatted output</step>
A:
{
"title": "Use formatted output",
"suggestions": [
{
"kind": "Update",
"path": "src/user.rs",
"symbol": "impl User pub fn print_info",
"description": "Use formatted output"
}
]
}
User:
What are the suggestions for the step: <step>Remove the 'email' field from the User struct</step>
A:
{
"title": "Remove email field",
"suggestions": [
{
"kind": "Delete",
"path": "src/user.rs",
"symbol": "struct User email"
}
]
}
Example 3:
User:
```rs src/vehicle.rs
struct Vehicle {
make: String,
model: String,
year: u32,
}
impl Vehicle {
fn new(make: String, model: String, year: u32) -> Self {
Vehicle { make, model, year }
}
fn print_year(&self) {
println!("Year: {}", self.year);
}
}
```
<step>Add a 'use std::fmt;' statement at the beginning of the file</step>
<step>Add a new method 'start_engine' in the Vehicle impl block</step>
What are the suggestions for the step: <step>Add a 'use std::fmt;' statement at the beginning of the file</step>
A:
{
"title": "Add use std::fmt statement",
"suggestions": [
{
"kind": "PrependChild",
"path": "src/vehicle.rs",
"description": "Add 'use std::fmt' statement"
}
]
}
User:
What are the suggestions for the step: <step>Add a new method 'start_engine' in the Vehicle impl block</step>
A:
{
"title": "Add start_engine method",
"suggestions": [
{
"kind": "InsertSiblingAfter",
"path": "src/vehicle.rs",
"symbol": "impl Vehicle fn new",
"description": "Add start_engine method"
}
]
}
Example 4:
User:
```rs src/employee.rs
struct Employee {
name: String,
position: String,
salary: u32,
department: String,
}
impl Employee {
fn new(name: String, position: String, salary: u32, department: String) -> Self {
Employee { name, position, salary, department }
}
fn print_details(&self) {
println!("Name: {}, Position: {}, Salary: {}, Department: {}",
self.name, self.position, self.salary, self.department);
}
fn give_raise(&mut self, amount: u32) {
self.salary += amount;
}
}
```
<step>Make salary an f32</step>
What are the suggestions for the step: <step>Make salary an f32</step>
A (wrong):
{
"title": "Change salary to f32",
"suggestions": [
{
"kind": "Update",
"path": "src/employee.rs",
"symbol": "struct Employee",
"description": "Change the type of salary to an f32"
},
{
"kind": "Update",
"path": "src/employee.rs",
"symbol": "struct Employee salary",
"description": "Change the type to an f32"
}
]
}
This example demonstrates what not to do. `struct Employee salary` is a child of `struct Employee`.
A (corrected):
{
"title": "Change salary to f32",
"suggestions": [
{
"kind": "Update",
"path": "src/employee.rs",
"symbol": "struct Employee salary",
"description": "Change the type to an f32"
}
]
}
User:
What are the correct suggestions for the step: <step>Remove the 'department' field and update the 'print_details' method</step>
A:
{
"title": "Remove department",
"suggestions": [
{
"kind": "Delete",
"path": "src/employee.rs",
"symbol": "struct Employee department"
},
{
"kind": "Update",
"path": "src/employee.rs",
"symbol": "impl Employee fn print_details",
"description": "Don't print the 'department' field"
}
]
}
Example 5:
User:
```rs src/game.rs
struct Player {
name: String,
health: i32,
pub score: u32,
}
impl Player {
pub fn new(name: String) -> Self {
Player { name, health: 100, score: 0 }
}
}
struct Game {
players: Vec<Player>,
}
impl Game {
fn new() -> Self {
Game { players: Vec::new() }
}
}
```
<step>Add a 'level' field to Player and update the 'new' method</step>
A:
{
"title": "Add level field to Player",
"suggestions": [
{
"kind": "InsertSiblingAfter",
"path": "src/game.rs",
"symbol": "struct Player pub score",
"description": "Add level field to Player"
},
{
"kind": "Update",
"path": "src/game.rs",
"symbol": "impl Player pub fn new",
"description": "Initialize level in new method"
}
]
}
Example 6:
User:
```rs src/config.rs
use std::collections::HashMap;
struct Config {
settings: HashMap<String, String>,
}
impl Config {
fn new() -> Self {
Config { settings: HashMap::new() }
}
}
```
<step>Add a 'load_from_file' method to Config and import necessary modules</step>
A:
{
"title": "Add load_from_file method",
"suggestions": [
{
"kind": "PrependChild",
"path": "src/config.rs",
"description": "Import std::fs and std::io modules"
},
{
"kind": "AppendChild",
"path": "src/config.rs",
"symbol": "impl Config",
"description": "Add load_from_file method"
}
]
}
Example 7:
User:
```rs src/database.rs
pub(crate) struct Database {
connection: Connection,
}
impl Database {
fn new(url: &str) -> Result<Self, Error> {
let connection = Connection::connect(url)?;
Ok(Database { connection })
}
async fn query(&self, sql: &str) -> Result<Vec<Row>, Error> {
self.connection.query(sql, &[])
}
}
```
<step>Add error handling to the 'query' method and create a custom error type</step>
A:
{
"title": "Add error handling to query",
"suggestions": [
{
"kind": "PrependChild",
"path": "src/database.rs",
"description": "Import necessary error handling modules"
},
{
"kind": "InsertSiblingBefore",
"path": "src/database.rs",
"symbol": "pub(crate) struct Database",
"description": "Define custom DatabaseError enum"
},
{
"kind": "Update",
"path": "src/database.rs",
"symbol": "impl Database async fn query",
"description": "Implement error handling in query method"
}
]
}
Now generate the suggestions for the following step:
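
For reference, the suggestion objects shown in the examples above are plain JSON keyed by a `kind` tag, so they can be modeled and parsed with serde. The sketch below is illustrative only: the type and field names mirror the prompt's examples and are assumptions, not Zed's actual internal types.

```rust
use serde::Deserialize;

// Hypothetical mirror of the suggestion JSON used in the examples above.
#[derive(Debug, Deserialize)]
struct StepSuggestions {
    title: String,
    suggestions: Vec<Suggestion>,
}

// Internally tagged on "kind", matching the shape of the example responses.
#[derive(Debug, Deserialize)]
#[serde(tag = "kind")]
enum Suggestion {
    Update { path: String, symbol: String, description: String },
    AppendChild { path: String, symbol: String, description: String },
    PrependChild { path: String, description: String },
    InsertSiblingAfter { path: String, symbol: String, description: String },
    InsertSiblingBefore { path: String, symbol: String, description: String },
    Delete { path: String, symbol: String },
}

fn main() -> serde_json::Result<()> {
    // One of the example responses from above, parsed into the sketch types.
    let raw = r#"{
        "title": "Implement Display for Rectangle",
        "suggestions": [
            {
                "kind": "InsertSiblingAfter",
                "path": "src/shapes.rs",
                "symbol": "impl Rectangle",
                "description": "Implement Display trait for Rectangle"
            }
        ]
    }"#;
    let parsed: StepSuggestions = serde_json::from_str(raw)?;
    println!("{parsed:#?}");
    Ok(())
}
```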

View File

@@ -1,18 +0,0 @@
You are an expert terminal user.
You will be given a description of a command and you need to respond with a command that matches the description.
Do not include markdown blocks or any other text formatting in your response, always respond with a single command that can be executed in the given shell.
Current OS name is '{{os}}', architecture is '{{arch}}'.
{{#if shell}}
Current shell is '{{shell}}'.
{{/if}}
{{#if working_directory}}
Current working directory is '{{working_directory}}'.
{{/if}}
{{#if latest_output}}
Latest non-empty terminal output:
{{#each latest_output as |line|}}
{{line}}
{{/each}}
{{/if}}
Here is the description of the command:
{{{user_prompt}}}
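
One detail worth noting in the removed template is the `latest_output` loop, which expects a small list of already-filtered lines rather than raw terminal text. A hypothetical helper like the one below (an assumption, not code from this repository) could produce that list before the template is rendered.

```rust
/// Keep only the last `max_lines` non-empty lines of raw terminal output,
/// oldest first, ready to be passed to the template as `latest_output`.
fn latest_output(raw: &str, max_lines: usize) -> Vec<String> {
    let mut lines: Vec<String> = raw
        .lines()
        .rev()
        .filter(|line| !line.trim().is_empty())
        .take(max_lines)
        .map(str::to_owned)
        .collect();
    lines.reverse();
    lines
}

fn main() {
    let raw = "cargo build\n\n   Compiling zed v0.1.0\nerror[E0432]: unresolved import\n";
    // The two most recent non-empty lines are kept, in their original order.
    assert_eq!(
        latest_output(raw, 2),
        vec!["   Compiling zed v0.1.0", "error[E0432]: unresolved import"]
    );
}
```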

View File

@@ -26,9 +26,6 @@
},
// The name of a font to use for rendering text in the editor
"buffer_font_family": "Zed Plex Mono",
// Set the buffer text's font fallbacks, this will be merged with
// the platform's default fallbacks.
"buffer_font_fallbacks": [],
// The OpenType features to enable for text in the editor.
"buffer_font_features": {
// Disable ligatures:
@@ -50,11 +47,8 @@
// },
"buffer_line_height": "comfortable",
// The name of a font to use for rendering text in the UI
// You can set this to ".SystemUIFont" to use the system font
// (On macOS) You can set this to ".SysmtemUIFont" to use the system font
"ui_font_family": "Zed Plex Sans",
// Set the UI's font fallbacks, this will be merged with the platform's
// default font fallbacks.
"ui_font_fallbacks": [],
// The OpenType features to enable for text in the UI
"ui_font_features": {
// Disable ligatures:
@@ -88,7 +82,7 @@
// Whether to confirm before quitting Zed.
"confirm_quit": false,
// Whether to restore last closed project when fresh Zed instance is opened.
"restore_on_startup": "last_session",
"restore_on_startup": "last_workspace",
// Size of the drop target in the editor.
"drop_target_size": 0.2,
// Whether the window should be closed when using 'close active item' on a window with no tabs.
@@ -100,9 +94,6 @@
// 3. Never close the window
// "when_closing_with_no_tabs": "keep_window_open",
"when_closing_with_no_tabs": "platform_default",
// Whether to use the system provided dialogs for Open and Save As.
// When set to false, Zed will use the built-in keyboard-first pickers.
"use_system_path_prompts": true,
// Whether the cursor blinks in the editor.
"cursor_blink": true,
// How to highlight the current line in the editor.
@@ -125,11 +116,6 @@
// The debounce delay before re-querying the language server for completion
// documentation when not included in original completion list.
"completion_documentation_secondary_query_debounce": 300,
// Show method signatures in the editor, when inside parentheses.
"auto_signature_help": false,
/// Whether to show the signature help after completion or a bracket pair inserted.
/// If `auto_signature_help` is enabled, this setting will be treated as enabled also.
"show_signature_help_after_edits": true,
// Whether to show wrap guides (vertical rulers) in the editor.
// Setting this to true will show a guide at the 'preferred_line_length' value
// if softwrap is set to 'preferred_line_length', and will show any
@@ -142,7 +128,14 @@
// The default number of lines to expand excerpts in the multibuffer by.
"expand_excerpt_lines": 3,
// Globs to match against file paths to determine if a file is private.
"private_files": ["**/.env*", "**/*.pem", "**/*.key", "**/*.cert", "**/*.crt", "**/secrets.yml"],
"private_files": [
"**/.env*",
"**/*.pem",
"**/*.key",
"**/*.cert",
"**/*.crt",
"**/secrets.yml"
],
// Whether to use additional LSP queries to format (and amend) the code after
// every "trigger" symbol input, defined by LSP server capabilities.
"use_on_type_format": true,
@@ -269,8 +262,6 @@
// to both the horizontal and vertical delta values while scrolling.
"scroll_sensitivity": 1.0,
"relative_line_numbers": false,
// If 'search_wrap' is disabled, search results do not wrap around the end of the file.
"search_wrap": true,
// When to populate a new search's query based on the text under the cursor.
// This setting can take the following three values:
//
@@ -318,7 +309,7 @@
"auto_reveal_entries": true,
// Whether to fold directories automatically and show compact folders
// (e.g. "a/b/c" ) when a directory has only one subdirectory inside.
"auto_fold_dirs": true,
"auto_fold_dirs": false,
/// Scrollbar-related settings
"scrollbar": {
/// When to show the scrollbar in the project panel.
@@ -381,7 +372,7 @@
},
"assistant": {
// Version of this setting.
"version": "2",
"version": "1",
// Whether the assistant is enabled.
"enabled": true,
// Whether to show the assistant panel button in the status bar.
@@ -392,12 +383,17 @@
"default_width": 640,
// Default height when the assistant is docked to the bottom.
"default_height": 320,
// The default model to use when creating new contexts.
"default_model": {
// The provider to use.
"provider": "openai",
// The model to use.
"model": "gpt-4o"
// AI provider.
"provider": {
"name": "openai",
// The default model to use when creating new contexts. This
// setting can take three values:
//
// 1. "gpt-3.5-turbo"
// 2. "gpt-4"
// 3. "gpt-4-turbo-preview"
// 4. "gpt-4o"
"default_model": "gpt-4o"
}
},
// Whether the screen sharing icon is shown in the os status bar.
@@ -435,9 +431,7 @@
// Show git status colors in the editor tabs.
"git_status": false,
// Position of the close button on the editor tabs.
"close_position": "right",
// Whether to show the file icon for a tab.
"file_icons": false
"close_position": "right"
},
// Settings related to preview tabs.
"preview_tabs": {
@@ -545,14 +539,6 @@
// "delay_ms": 600
}
},
// Configuration for how direnv configuration should be loaded. May take 2 values:
// 1. Load direnv configuration through the shell hook, works for POSIX shells and fish.
// "load_direnv": "shell_hook"
// 2. Load direnv configuration using `direnv export json` directly.
// This can help with some shells that otherwise would not detect
// the direnv environment, such as nushell or elvish.
// "load_direnv": "direct"
"load_direnv": "shell_hook",
"inline_completions": {
// A list of globs representing files that inline completions should be disabled for.
"disabled_globs": [".env"]
@@ -635,7 +621,7 @@
// "option_to_meta": false,
// 2. Make the option keys behave as a 'meta' key, e.g. for emacs
// "option_to_meta": true,
"option_as_meta": true,
"option_as_meta": false,
// Whether or not selecting text in the terminal will automatically
// copy to the system clipboard.
"copy_on_select": false,
@@ -681,10 +667,6 @@
// Set the terminal's font family. If this option is not included,
// the terminal will default to matching the buffer's font family.
// "font_family": "Zed Plex Mono",
// Set the terminal's font fallbacks. If this option is not included,
// the terminal will default to matching the buffer's font fallbacks.
// This will be merged with the platform's default font fallbacks
// "font_fallbacks": ["FiraCode Nerd Fonts"],
// Sets the maximum number of lines in the terminal's scrollback buffer.
// Default: 10_000, maximum: 100_000 (all bigger values set will be treated as 100_000), 0 disables the scrolling.
// Existing terminals will not pick up this change until they are recreated.
@@ -709,7 +691,7 @@
//
"file_types": {
"JSON": ["flake.lock"],
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "tsconfig.json"]
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json"]
},
// The extensions that Zed should automatically install on startup.
//
@@ -732,12 +714,10 @@
}
},
"C": {
"format_on_save": "off",
"use_on_type_format": false
"format_on_save": "off"
},
"C++": {
"format_on_save": "off",
"use_on_type_format": false
"format_on_save": "off"
},
"CSS": {
"prettier": {
@@ -747,9 +727,6 @@
"Elixir": {
"language_servers": ["elixir-ls", "!next-ls", "!lexical", "..."]
},
"Erlang": {
"language_servers": ["erlang-ls", "!elp", "..."]
},
"Go": {
"code_actions_on_format": {
"source.organizeImports": true
@@ -792,13 +769,11 @@
},
"Markdown": {
"format_on_save": "off",
"use_on_type_format": false,
"prettier": {
"allowed": true
}
},
"PHP": {
"language_servers": ["phpactor", "!intelephense", "..."],
"prettier": {
"allowed": true,
"plugins": ["@prettier/plugin-php"],
@@ -806,7 +781,7 @@
}
},
"Ruby": {
"language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "..."]
"language_servers": ["solargraph", "!ruby-lsp", "..."]
},
"SCSS": {
"prettier": {
@@ -819,9 +794,6 @@
"plugins": ["prettier-plugin-sql"]
}
},
"Starlark": {
"language_servers": ["starpls", "!buck2-lsp", "..."]
},
"Svelte": {
"prettier": {
"allowed": true,
@@ -862,23 +834,6 @@
}
}
},
// Different settings for specific language models.
"language_models": {
"anthropic": {
"version": "1",
"api_url": "https://api.anthropic.com"
},
"google": {
"api_url": "https://generativelanguage.googleapis.com"
},
"ollama": {
"api_url": "http://localhost:11434"
},
"openai": {
"version": "1",
"api_url": "https://api.openai.com/v1"
}
},
// Zed's Prettier integration settings.
// Allows to enable/disable formatting with Prettier
// and configure default Prettier, used when no project-level Prettier installation is found.
@@ -911,21 +866,11 @@
// }
// }
},
// Jupyter settings
"jupyter": {
"enabled": true
// Specify the language name as the key and the kernel name as the value.
// "kernel_selections": {
// "python": "conda-base"
// "typescript": "deno"
// }
},
// Vim settings
"vim": {
"use_system_clipboard": "always",
"use_multiline_find": false,
"use_smartcase_find": false,
"custom_digraphs": {}
"use_smartcase_find": false
},
// The server to connect to. If the environment variable
// ZED_SERVER_URL is set, it will override this setting.
@@ -973,29 +918,5 @@
// Examples:
// - "proxy": "socks5://localhost:10808"
// - "proxy": "http://127.0.0.1:10809"
"proxy": null,
// Set to configure aliases for the command palette.
// When typing a query which is a key of this object, the value will be used instead.
//
// Examples:
// {
// "W": "workspace::Save"
// }
"command_aliases": {},
// ssh_connections is an array of ssh connections.
// By default this setting is null, which disables the direct ssh connection support.
// You can configure these from `project: Open Remote` in the command palette.
// Zed's ssh support will pull configuration from your ~/.ssh too.
// Examples:
// [
// {
// "host": "example-box",
// "projects": [
// {
// "paths": ["/home/user/code/zed"]
// }
// ]
// }
// ]
"ssh_connections": null
"proxy": null
}

View File

@@ -1,5 +1,5 @@
// Folder-specific settings
//
// For a full list of overridable settings, and general information on folder-specific settings,
// see the documentation: https://zed.dev/docs/configuring-zed#settings-files
// see the documentation: https://zed.dev/docs/configuring-zed#folder-specific-settings
{}

View File

@@ -17,27 +17,6 @@
// What to do with the terminal pane and tab, after the command was started:
// * `always` — always show the terminal pane, add and focus the corresponding task's tab in it (default)
// * `never` — avoid changing current terminal pane focus, but still add/reuse the task's tab there
"reveal": "always",
// What to do with the terminal pane and tab, after the command had finished:
// * `never` — Do nothing when the command finishes (default)
// * `always` — always hide the terminal tab, hide the pane also if it was the last tab in it
// * `on_success` — hide the terminal tab on task success only, otherwise behaves similar to `always`
"hide": "never",
// Which shell to use when running a task inside the terminal.
// May take 3 values:
// 1. (default) Use the system's default terminal configuration in /etc/passwd
// "shell": "system"
// 2. A program:
// "shell": {
// "program": "sh"
// }
// 3. A program with arguments:
// "shell": {
// "with_arguments": {
// "program": "/bin/bash",
// "arguments": ["--login"]
// }
// }
"shell": "system"
"reveal": "always"
}
]

View File

@@ -4,8 +4,8 @@
// documentation: https://zed.dev/docs/configuring-zed
//
// To see all of Zed's default settings without changing your
// custom settings, run `zed: open default settings` from the
// command palette
// custom settings, run the `open default settings` command
// from the command palette or from `Zed` application menu.
{
"ui_font_size": 16,
"buffer_font_size": 16,

View File

@@ -38,7 +38,6 @@
"icon.accent": "#10a793ff",
"status_bar.background": "#262933ff",
"title_bar.background": "#262933ff",
"title_bar.inactive_background": "#21242bff",
"toolbar.background": "#1e2025ff",
"tab_bar.background": "#21242bff",
"tab.inactive_background": "#21242bff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#566ddaff",
"status_bar.background": "#3a353fff",
"title_bar.background": "#3a353fff",
"title_bar.inactive_background": "#221f26ff",
"toolbar.background": "#19171cff",
"tab_bar.background": "#221f26ff",
"tab.inactive_background": "#221f26ff",
@@ -423,7 +422,6 @@
"icon.accent": "#586cdaff",
"status_bar.background": "#bfbcc5ff",
"title_bar.background": "#bfbcc5ff",
"title_bar.inactive_background": "#e6e3ebff",
"toolbar.background": "#efecf4ff",
"tab_bar.background": "#e6e3ebff",
"tab.inactive_background": "#e6e3ebff",
@@ -808,7 +806,6 @@
"icon.accent": "#6684e0ff",
"status_bar.background": "#45433bff",
"title_bar.background": "#45433bff",
"title_bar.inactive_background": "#262622ff",
"toolbar.background": "#20201dff",
"tab_bar.background": "#262622ff",
"tab.inactive_background": "#262622ff",
@@ -1193,7 +1190,6 @@
"icon.accent": "#6684dfff",
"status_bar.background": "#cecab4ff",
"title_bar.background": "#cecab4ff",
"title_bar.inactive_background": "#eeebd7ff",
"toolbar.background": "#fefbecff",
"tab_bar.background": "#eeebd7ff",
"tab.inactive_background": "#eeebd7ff",
@@ -1578,7 +1574,6 @@
"icon.accent": "#36a165ff",
"status_bar.background": "#424136ff",
"title_bar.background": "#424136ff",
"title_bar.inactive_background": "#2c2b23ff",
"toolbar.background": "#22221bff",
"tab_bar.background": "#2c2b23ff",
"tab.inactive_background": "#2c2b23ff",
@@ -1963,7 +1958,6 @@
"icon.accent": "#37a165ff",
"status_bar.background": "#c5c4b9ff",
"title_bar.background": "#c5c4b9ff",
"title_bar.inactive_background": "#ebeae3ff",
"toolbar.background": "#f4f3ecff",
"tab_bar.background": "#ebeae3ff",
"tab.inactive_background": "#ebeae3ff",
@@ -2348,7 +2342,6 @@
"icon.accent": "#407ee6ff",
"status_bar.background": "#443c39ff",
"title_bar.background": "#443c39ff",
"title_bar.inactive_background": "#27211eff",
"toolbar.background": "#1b1918ff",
"tab_bar.background": "#27211eff",
"tab.inactive_background": "#27211eff",
@@ -2733,7 +2726,6 @@
"icon.accent": "#407ee6ff",
"status_bar.background": "#ccc7c5ff",
"title_bar.background": "#ccc7c5ff",
"title_bar.inactive_background": "#e9e6e4ff",
"toolbar.background": "#f0eeedff",
"tab_bar.background": "#e9e6e4ff",
"tab.inactive_background": "#e9e6e4ff",
@@ -3118,7 +3110,6 @@
"icon.accent": "#5169ebff",
"status_bar.background": "#433a43ff",
"title_bar.background": "#433a43ff",
"title_bar.inactive_background": "#252025ff",
"toolbar.background": "#1b181bff",
"tab_bar.background": "#252025ff",
"tab.inactive_background": "#252025ff",
@@ -3503,7 +3494,6 @@
"icon.accent": "#5169ebff",
"status_bar.background": "#c6b8c6ff",
"title_bar.background": "#c6b8c6ff",
"title_bar.inactive_background": "#e0d5e0ff",
"toolbar.background": "#f7f3f7ff",
"tab_bar.background": "#e0d5e0ff",
"tab.inactive_background": "#e0d5e0ff",
@@ -3888,7 +3878,6 @@
"icon.accent": "#267eadff",
"status_bar.background": "#33444dff",
"title_bar.background": "#33444dff",
"title_bar.inactive_background": "#1c2529ff",
"toolbar.background": "#161b1dff",
"tab_bar.background": "#1c2529ff",
"tab.inactive_background": "#1c2529ff",
@@ -4273,7 +4262,6 @@
"icon.accent": "#267eadff",
"status_bar.background": "#a6cadcff",
"title_bar.background": "#a6cadcff",
"title_bar.inactive_background": "#cdeaf9ff",
"toolbar.background": "#ebf8ffff",
"tab_bar.background": "#cdeaf9ff",
"tab.inactive_background": "#cdeaf9ff",
@@ -4658,7 +4646,6 @@
"icon.accent": "#7272caff",
"status_bar.background": "#3b3535ff",
"title_bar.background": "#3b3535ff",
"title_bar.inactive_background": "#252020ff",
"toolbar.background": "#1b1818ff",
"tab_bar.background": "#252020ff",
"tab.inactive_background": "#252020ff",
@@ -5043,7 +5030,6 @@
"icon.accent": "#7272caff",
"status_bar.background": "#c1bbbbff",
"title_bar.background": "#c1bbbbff",
"title_bar.inactive_background": "#ebe3e3ff",
"toolbar.background": "#f4ececff",
"tab_bar.background": "#ebe3e3ff",
"tab.inactive_background": "#ebe3e3ff",
@@ -5428,7 +5414,6 @@
"icon.accent": "#468b8fff",
"status_bar.background": "#353f39ff",
"title_bar.background": "#353f39ff",
"title_bar.inactive_background": "#1f2621ff",
"toolbar.background": "#171c19ff",
"tab_bar.background": "#1f2621ff",
"tab.inactive_background": "#1f2621ff",
@@ -5813,7 +5798,6 @@
"icon.accent": "#488b90ff",
"status_bar.background": "#bcc5bfff",
"title_bar.background": "#bcc5bfff",
"title_bar.inactive_background": "#e3ebe6ff",
"toolbar.background": "#ecf4eeff",
"tab_bar.background": "#e3ebe6ff",
"tab.inactive_background": "#e3ebe6ff",
@@ -6198,7 +6182,6 @@
"icon.accent": "#3e62f4ff",
"status_bar.background": "#3b453bff",
"title_bar.background": "#3b453bff",
"title_bar.inactive_background": "#1f231fff",
"toolbar.background": "#131513ff",
"tab_bar.background": "#1f231fff",
"tab.inactive_background": "#1f231fff",
@@ -6583,7 +6566,6 @@
"icon.accent": "#3e61f4ff",
"status_bar.background": "#b4ceb4ff",
"title_bar.background": "#b4ceb4ff",
"title_bar.inactive_background": "#daeedaff",
"toolbar.background": "#f3faf3ff",
"tab_bar.background": "#daeedaff",
"tab.inactive_background": "#daeedaff",
@@ -6968,7 +6950,6 @@
"icon.accent": "#3e8ed0ff",
"status_bar.background": "#3e4769ff",
"title_bar.background": "#3e4769ff",
"title_bar.inactive_background": "#262f51ff",
"toolbar.background": "#202646ff",
"tab_bar.background": "#262f51ff",
"tab.inactive_background": "#262f51ff",
@@ -7353,7 +7334,6 @@
"icon.accent": "#3e8fd0ff",
"status_bar.background": "#c1c5d8ff",
"title_bar.background": "#c1c5d8ff",
"title_bar.inactive_background": "#e5e8f5ff",
"toolbar.background": "#f5f7ffff",
"tab_bar.background": "#e5e8f5ff",
"tab.inactive_background": "#e5e8f5ff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#5ac1feff",
"status_bar.background": "#313337ff",
"title_bar.background": "#313337ff",
"title_bar.inactive_background": "#1f2127ff",
"toolbar.background": "#0d1016ff",
"tab_bar.background": "#1f2127ff",
"tab.inactive_background": "#1f2127ff",
@@ -408,7 +407,6 @@
"icon.accent": "#3b9ee5ff",
"status_bar.background": "#dcdddeff",
"title_bar.background": "#dcdddeff",
"title_bar.inactive_background": "#ececedff",
"toolbar.background": "#fcfcfcff",
"tab_bar.background": "#ececedff",
"tab.inactive_background": "#ececedff",
@@ -778,7 +776,6 @@
"icon.accent": "#72cffeff",
"status_bar.background": "#464a52ff",
"title_bar.background": "#464a52ff",
"title_bar.inactive_background": "#353944ff",
"toolbar.background": "#242835ff",
"tab_bar.background": "#353944ff",
"tab.inactive_background": "#353944ff",

View File

@@ -47,7 +47,6 @@
"icon.accent": "#83a598ff",
"status_bar.background": "#4c4642ff",
"title_bar.background": "#4c4642ff",
"title_bar.inactive_background": "#3a3735ff",
"toolbar.background": "#282828ff",
"tab_bar.background": "#3a3735ff",
"tab.inactive_background": "#3a3735ff",
@@ -431,7 +430,6 @@
"icon.accent": "#83a598ff",
"status_bar.background": "#4c4642ff",
"title_bar.background": "#4c4642ff",
"title_bar.inactive_background": "#393634ff",
"toolbar.background": "#1d2021ff",
"tab_bar.background": "#393634ff",
"tab.inactive_background": "#393634ff",
@@ -815,7 +813,6 @@
"icon.accent": "#83a598ff",
"status_bar.background": "#4c4642ff",
"title_bar.background": "#4c4642ff",
"title_bar.inactive_background": "#3b3735ff",
"toolbar.background": "#32302fff",
"tab_bar.background": "#3b3735ff",
"tab.inactive_background": "#3b3735ff",
@@ -1199,7 +1196,6 @@
"icon.accent": "#0b6678ff",
"status_bar.background": "#d9c8a4ff",
"title_bar.background": "#d9c8a4ff",
"title_bar.inactive_background": "#ecddb4ff",
"toolbar.background": "#fbf1c7ff",
"tab_bar.background": "#ecddb4ff",
"tab.inactive_background": "#ecddb4ff",
@@ -1583,7 +1579,6 @@
"icon.accent": "#0b6678ff",
"status_bar.background": "#d9c8a4ff",
"title_bar.background": "#d9c8a4ff",
"title_bar.inactive_background": "#ecddb5ff",
"toolbar.background": "#f9f5d7ff",
"tab_bar.background": "#ecddb5ff",
"tab.inactive_background": "#ecddb5ff",
@@ -1967,7 +1962,6 @@
"icon.accent": "#0b6678ff",
"status_bar.background": "#d9c8a4ff",
"title_bar.background": "#d9c8a4ff",
"title_bar.inactive_background": "#ecdcb3ff",
"toolbar.background": "#f2e5bcff",
"tab_bar.background": "#ecdcb3ff",
"tab.inactive_background": "#ecdcb3ff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#74ade8ff",
"status_bar.background": "#3b414dff",
"title_bar.background": "#3b414dff",
"title_bar.inactive_background": "#2e343eff",
"toolbar.background": "#282c33ff",
"tab_bar.background": "#2f343eff",
"tab.inactive_background": "#2f343eff",
@@ -413,7 +412,6 @@
"icon.accent": "#5c78e2ff",
"status_bar.background": "#dcdcddff",
"title_bar.background": "#dcdcddff",
"title_bar.inactive_background": "#ebebecff",
"toolbar.background": "#fafafaff",
"tab_bar.background": "#ebebecff",
"tab.inactive_background": "#ebebecff",
@@ -493,7 +491,7 @@
"info": "#5c78e2ff",
"info.background": "#e2e2faff",
"info.border": "#cbcdf6ff",
"modified": "#a47a23ff",
"modified": "#dec184ff",
"modified.background": "#faf2e6ff",
"modified.border": "#f4e7d1ff",
"predictive": "#9b9ec6ff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#9bced6ff",
"status_bar.background": "#292738ff",
"title_bar.background": "#292738ff",
"title_bar.inactive_background": "#1c1b2aff",
"toolbar.background": "#191724ff",
"tab_bar.background": "#1c1b2aff",
"tab.inactive_background": "#1c1b2aff",
@@ -418,7 +417,6 @@
"icon.accent": "#57949fff",
"status_bar.background": "#dcd8d8ff",
"title_bar.background": "#dcd8d8ff",
"title_bar.inactive_background": "#fef9f2ff",
"toolbar.background": "#faf4edff",
"tab_bar.background": "#fef9f2ff",
"tab.inactive_background": "#fef9f2ff",
@@ -798,7 +796,6 @@
"icon.accent": "#9bced6ff",
"status_bar.background": "#38354eff",
"title_bar.background": "#38354eff",
"title_bar.inactive_background": "#28253cff",
"toolbar.background": "#232136ff",
"tab_bar.background": "#28253cff",
"tab.inactive_background": "#28253cff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#518b8bff",
"status_bar.background": "#333944ff",
"title_bar.background": "#333944ff",
"title_bar.inactive_background": "#2b3038ff",
"toolbar.background": "#282c33ff",
"tab_bar.background": "#2b3038ff",
"tab.inactive_background": "#2b3038ff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#278ad1ff",
"status_bar.background": "#073743ff",
"title_bar.background": "#073743ff",
"title_bar.inactive_background": "#04313bff",
"toolbar.background": "#002a35ff",
"tab_bar.background": "#04313bff",
"tab.inactive_background": "#04313bff",
@@ -408,7 +407,6 @@
"icon.accent": "#288bd1ff",
"status_bar.background": "#cfd0c4ff",
"title_bar.background": "#cfd0c4ff",
"title_bar.inactive_background": "#f3eddaff",
"toolbar.background": "#fdf6e3ff",
"tab_bar.background": "#f3eddaff",
"tab.inactive_background": "#f3eddaff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#499befff",
"status_bar.background": "#2a261cff",
"title_bar.background": "#2a261cff",
"title_bar.inactive_background": "#231f16ff",
"toolbar.background": "#1b1810ff",
"tab_bar.background": "#231f16ff",
"tab.inactive_background": "#231f16ff",

View File

@@ -107,7 +107,6 @@ impl ActivityIndicator {
Editor::for_buffer(buffer, Some(project.clone()), cx)
})),
None,
true,
cx,
);
})?;

View File

@@ -18,7 +18,7 @@ path = "src/anthropic.rs"
[dependencies]
anyhow.workspace = true
futures.workspace = true
http_client.workspace = true
http.workspace = true
isahc.workspace = true
schemars = { workspace = true, optional = true }
serde.workspace = true

View File

@@ -1,36 +1,25 @@
mod supported_countries;
use anyhow::{anyhow, Result};
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt};
use http::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use isahc::config::Configurable;
use serde::{Deserialize, Serialize};
use std::time::Duration;
use std::{convert::TryFrom, time::Duration};
use strum::EnumIter;
pub use supported_countries::*;
pub const ANTHROPIC_API_URL: &'static str = "https://api.anthropic.com";
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
#[default]
#[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-20240620")]
#[serde(alias = "claude-3-5-sonnet", rename = "claude-3-5-sonnet-20240620")]
Claude3_5Sonnet,
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-20240229")]
#[serde(alias = "claude-3-opus", rename = "claude-3-opus-20240229")]
Claude3Opus,
#[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-20240229")]
#[serde(alias = "claude-3-sonnet", rename = "claude-3-sonnet-20240229")]
Claude3Sonnet,
#[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-20240307")]
#[serde(alias = "claude-3-haiku", rename = "claude-3-haiku-20240307")]
Claude3Haiku,
#[serde(rename = "custom")]
Custom {
name: String,
max_tokens: usize,
/// Override this model with a different Anthropic model for tool calls.
tool_override: Option<String>,
},
}
impl Model {
@@ -44,100 +33,139 @@ impl Model {
} else if id.starts_with("claude-3-haiku") {
Ok(Self::Claude3Haiku)
} else {
Err(anyhow!("invalid model id"))
Err(anyhow!("Invalid model id: {}", id))
}
}
pub fn id(&self) -> &str {
pub fn id(&self) -> &'static str {
match self {
Model::Claude3_5Sonnet => "claude-3-5-sonnet-20240620",
Model::Claude3Opus => "claude-3-opus-20240229",
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
Model::Claude3Haiku => "claude-3-opus-20240307",
Self::Custom { name, .. } => name,
}
}
pub fn display_name(&self) -> &str {
pub fn display_name(&self) -> &'static str {
match self {
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
Self::Claude3Opus => "Claude 3 Opus",
Self::Claude3Sonnet => "Claude 3 Sonnet",
Self::Claude3Haiku => "Claude 3 Haiku",
Self::Custom { name, .. } => name,
}
}
pub fn max_token_count(&self) -> usize {
match self {
Self::Claude3_5Sonnet
| Self::Claude3Opus
| Self::Claude3Sonnet
| Self::Claude3Haiku => 200_000,
Self::Custom { max_tokens, .. } => *max_tokens,
}
200_000
}
}
pub fn tool_model_id(&self) -> &str {
if let Self::Custom {
tool_override: Some(tool_override),
..
} = self
{
tool_override
} else {
self.id()
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
User,
Assistant,
}
impl TryFrom<String> for Role {
type Error = anyhow::Error;
fn try_from(value: String) -> Result<Self> {
match value.as_str() {
"user" => Ok(Self::User),
"assistant" => Ok(Self::Assistant),
_ => Err(anyhow!("invalid role '{value}'")),
}
}
}
pub async fn complete(
client: &dyn HttpClient,
api_url: &str,
api_key: &str,
request: Request,
) -> Result<Response> {
let uri = format!("{api_url}/v1/messages");
let request_builder = HttpRequest::builder()
.method(Method::POST)
.uri(uri)
.header("Anthropic-Version", "2023-06-01")
.header("Anthropic-Beta", "tools-2024-04-04")
.header("X-Api-Key", api_key)
.header("Content-Type", "application/json");
let serialized_request = serde_json::to_string(&request)?;
let request = request_builder.body(AsyncBody::from(serialized_request))?;
let mut response = client.send(request).await?;
if response.status().is_success() {
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let response_message: Response = serde_json::from_slice(&body)?;
Ok(response_message)
} else {
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let body_str = std::str::from_utf8(&body)?;
Err(anyhow!(
"Failed to connect to API: {} {}",
response.status(),
body_str
))
impl From<Role> for String {
fn from(val: Role) -> Self {
match val {
Role::User => "user".to_owned(),
Role::Assistant => "assistant".to_owned(),
}
}
}
#[derive(Debug, Serialize)]
pub struct Request {
pub model: Model,
pub messages: Vec<RequestMessage>,
pub stream: bool,
pub system: String,
pub max_tokens: u32,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct RequestMessage {
pub role: Role,
pub content: String,
}
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ResponseEvent {
MessageStart {
message: ResponseMessage,
},
ContentBlockStart {
index: u32,
content_block: ContentBlock,
},
Ping {},
ContentBlockDelta {
index: u32,
delta: TextDelta,
},
ContentBlockStop {
index: u32,
},
MessageDelta {
delta: ResponseMessage,
usage: Usage,
},
MessageStop {},
}
#[derive(Deserialize, Debug)]
pub struct ResponseMessage {
#[serde(rename = "type")]
pub message_type: Option<String>,
pub id: Option<String>,
pub role: Option<String>,
pub content: Option<Vec<String>>,
pub model: Option<String>,
pub stop_reason: Option<String>,
pub stop_sequence: Option<String>,
pub usage: Option<Usage>,
}
#[derive(Deserialize, Debug)]
pub struct Usage {
pub input_tokens: Option<u32>,
pub output_tokens: Option<u32>,
}
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ContentBlock {
Text { text: String },
}
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum TextDelta {
TextDelta { text: String },
}
pub async fn stream_completion(
client: &dyn HttpClient,
api_url: &str,
api_key: &str,
request: Request,
low_speed_timeout: Option<Duration>,
) -> Result<BoxStream<'static, Result<Event>>> {
let request = StreamingRequest {
base: request,
stream: true,
};
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
let uri = format!("{api_url}/v1/messages");
let mut request_builder = HttpRequest::builder()
.method(Method::POST)
@@ -149,9 +177,7 @@ pub async fn stream_completion(
if let Some(low_speed_timeout) = low_speed_timeout {
request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
}
let serialized_request = serde_json::to_string(&request)?;
let request = request_builder.body(AsyncBody::from(serialized_request))?;
let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
let mut response = client.send(request).await?;
if response.status().is_success() {
let reader = BufReader::new(response.into_body());
@@ -176,10 +202,10 @@ pub async fn stream_completion(
let body_str = std::str::from_utf8(&body)?;
match serde_json::from_str::<Event>(body_str) {
Ok(Event::Error { error }) => Err(api_error_to_err(error)),
match serde_json::from_str::<ResponseEvent>(body_str) {
Ok(_) => Err(anyhow!(
"Unexpected success response while expecting an error: '{body_str}'",
"Unexpected success response while expecting an error: {}",
body_str,
)),
Err(_) => Err(anyhow!(
"Failed to connect to API: {} {}",
@@ -190,193 +216,42 @@ pub async fn stream_completion(
}
}
pub fn extract_text_from_events(
response: impl Stream<Item = Result<Event>>,
) -> impl Stream<Item = Result<String>> {
response.filter_map(|response| async move {
match response {
Ok(response) => match response {
Event::ContentBlockStart { content_block, .. } => match content_block {
Content::Text { text } => Some(Ok(text)),
_ => None,
},
Event::ContentBlockDelta { delta, .. } => match delta {
ContentDelta::TextDelta { text } => Some(Ok(text)),
_ => None,
},
Event::Error { error } => Some(Err(api_error_to_err(error))),
_ => None,
},
Err(error) => Some(Err(error)),
}
})
}
// #[cfg(test)]
// mod tests {
// use super::*;
// use http::IsahcHttpClient;
fn api_error_to_err(
ApiError {
error_type,
message,
}: ApiError,
) -> anyhow::Error {
anyhow!("API error. Type: '{error_type}', message: '{message}'",)
}
// #[tokio::test]
// async fn stream_completion_success() {
// let http_client = IsahcHttpClient::new().unwrap();
#[derive(Debug, Serialize, Deserialize)]
pub struct Message {
pub role: Role,
pub content: Vec<Content>,
}
// let request = Request {
// model: Model::Claude3Opus,
// messages: vec![RequestMessage {
// role: Role::User,
// content: "Ping".to_string(),
// }],
// stream: true,
// system: "Respond to ping with pong".to_string(),
// max_tokens: 4096,
// };
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Role {
User,
Assistant,
}
// let stream = stream_completion(
// &http_client,
// "https://api.anthropic.com",
// &std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY not set"),
// request,
// )
// .await
// .unwrap();
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum Content {
#[serde(rename = "text")]
Text { text: String },
#[serde(rename = "image")]
Image { source: ImageSource },
#[serde(rename = "tool_use")]
ToolUse {
id: String,
name: String,
input: serde_json::Value,
},
#[serde(rename = "tool_result")]
ToolResult {
tool_use_id: String,
content: String,
},
}
#[derive(Debug, Serialize, Deserialize)]
pub struct ImageSource {
#[serde(rename = "type")]
pub source_type: String,
pub media_type: String,
pub data: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Tool {
pub name: String,
pub description: String,
pub input_schema: serde_json::Value,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolChoice {
Auto,
Any,
Tool { name: String },
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Request {
pub model: String,
pub max_tokens: u32,
pub messages: Vec<Message>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub tools: Vec<Tool>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tool_choice: Option<ToolChoice>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub system: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub metadata: Option<Metadata>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub stop_sequences: Vec<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub temperature: Option<f32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub top_k: Option<u32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub top_p: Option<f32>,
}
#[derive(Debug, Serialize, Deserialize)]
struct StreamingRequest {
#[serde(flatten)]
pub base: Request,
pub stream: bool,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Metadata {
pub user_id: Option<String>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Usage {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub input_tokens: Option<u32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub output_tokens: Option<u32>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Response {
pub id: String,
#[serde(rename = "type")]
pub response_type: String,
pub role: Role,
pub content: Vec<Content>,
pub model: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub stop_reason: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub stop_sequence: Option<String>,
pub usage: Usage,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum Event {
#[serde(rename = "message_start")]
MessageStart { message: Response },
#[serde(rename = "content_block_start")]
ContentBlockStart {
index: usize,
content_block: Content,
},
#[serde(rename = "content_block_delta")]
ContentBlockDelta { index: usize, delta: ContentDelta },
#[serde(rename = "content_block_stop")]
ContentBlockStop { index: usize },
#[serde(rename = "message_delta")]
MessageDelta { delta: MessageDelta, usage: Usage },
#[serde(rename = "message_stop")]
MessageStop,
#[serde(rename = "ping")]
Ping,
#[serde(rename = "error")]
Error { error: ApiError },
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ContentDelta {
#[serde(rename = "text_delta")]
TextDelta { text: String },
#[serde(rename = "input_json_delta")]
InputJsonDelta { partial_json: String },
}
#[derive(Debug, Serialize, Deserialize)]
pub struct MessageDelta {
pub stop_reason: Option<String>,
pub stop_sequence: Option<String>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct ApiError {
#[serde(rename = "type")]
pub error_type: String,
pub message: String,
}
// stream
// .for_each(|event| async {
// match event {
// Ok(event) => println!("{:?}", event),
// Err(e) => eprintln!("Error: {:?}", e),
// }
// })
// .await;
// }
// }

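A minimal usage sketch (illustrative, not part of the diff) of the richer request/streaming API shown above — the side of the diff that defines `Request`, `Message`, `Content`, and `extract_text_from_events`. The function name, prompt text, and error handling are assumptions for the example:

use anthropic::{extract_text_from_events, stream_completion, Content, Message, Request, Role, ANTHROPIC_API_URL};
use futures::StreamExt;

// Sends a single user message and concatenates the streamed text chunks.
async fn ping(client: &dyn http_client::HttpClient, api_key: &str) -> anyhow::Result<String> {
    let request = Request {
        model: "claude-3-5-sonnet-20240620".into(),
        max_tokens: 1024,
        messages: vec![Message {
            role: Role::User,
            content: vec![Content::Text { text: "Ping".into() }],
        }],
        tools: Vec::new(),
        tool_choice: None,
        system: Some("Respond to ping with pong".into()),
        metadata: None,
        stop_sequences: Vec::new(),
        temperature: None,
        top_k: None,
        top_p: None,
    };
    let events = stream_completion(client, ANTHROPIC_API_URL, api_key, request, None).await?;
    let mut chunks = extract_text_from_events(events).boxed();
    let mut text = String::new();
    while let Some(chunk) = chunks.next().await {
        text.push_str(&chunk?);
    }
    Ok(text)
}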
View File

@@ -1,194 +0,0 @@
use std::collections::HashSet;
use std::sync::LazyLock;
/// Returns whether the given country code is supported by Anthropic.
///
/// https://www.anthropic.com/supported-countries
pub fn is_supported_country(country_code: &str) -> bool {
SUPPORTED_COUNTRIES.contains(&country_code)
}
/// The list of country codes supported by Anthropic.
///
/// https://www.anthropic.com/supported-countries
static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
vec![
"AL", // Albania
"DZ", // Algeria
"AD", // Andorra
"AO", // Angola
"AG", // Antigua and Barbuda
"AR", // Argentina
"AM", // Armenia
"AU", // Australia
"AT", // Austria
"AZ", // Azerbaijan
"BS", // Bahamas
"BH", // Bahrain
"BD", // Bangladesh
"BB", // Barbados
"BE", // Belgium
"BZ", // Belize
"BJ", // Benin
"BT", // Bhutan
"BO", // Bolivia
"BA", // Bosnia and Herzegovina
"BW", // Botswana
"BR", // Brazil
"BN", // Brunei
"BG", // Bulgaria
"BF", // Burkina Faso
"BI", // Burundi
"CV", // Cabo Verde
"KH", // Cambodia
"CM", // Cameroon
"CA", // Canada
"TD", // Chad
"CL", // Chile
"CO", // Colombia
"KM", // Comoros
"CG", // Congo (Brazzaville)
"CR", // Costa Rica
"CI", // Côte d'Ivoire
"HR", // Croatia
"CY", // Cyprus
"CZ", // Czechia (Czech Republic)
"DK", // Denmark
"DJ", // Djibouti
"DM", // Dominica
"DO", // Dominican Republic
"EC", // Ecuador
"EG", // Egypt
"SV", // El Salvador
"GQ", // Equatorial Guinea
"EE", // Estonia
"SZ", // Eswatini
"FJ", // Fiji
"FI", // Finland
"FR", // France
"GA", // Gabon
"GM", // Gambia
"GE", // Georgia
"DE", // Germany
"GH", // Ghana
"GR", // Greece
"GD", // Grenada
"GT", // Guatemala
"GN", // Guinea
"GW", // Guinea-Bissau
"GY", // Guyana
"HT", // Haiti
"HN", // Honduras
"HU", // Hungary
"IS", // Iceland
"IN", // India
"ID", // Indonesia
"IQ", // Iraq
"IE", // Ireland
"IL", // Israel
"IT", // Italy
"JM", // Jamaica
"JP", // Japan
"JO", // Jordan
"KZ", // Kazakhstan
"KE", // Kenya
"KI", // Kiribati
"KW", // Kuwait
"KG", // Kyrgyzstan
"LA", // Laos
"LV", // Latvia
"LB", // Lebanon
"LS", // Lesotho
"LR", // Liberia
"LI", // Liechtenstein
"LT", // Lithuania
"LU", // Luxembourg
"MG", // Madagascar
"MW", // Malawi
"MY", // Malaysia
"MV", // Maldives
"MT", // Malta
"MH", // Marshall Islands
"MR", // Mauritania
"MU", // Mauritius
"MX", // Mexico
"FM", // Micronesia
"MD", // Moldova
"MC", // Monaco
"MN", // Mongolia
"ME", // Montenegro
"MA", // Morocco
"MZ", // Mozambique
"NA", // Namibia
"NR", // Nauru
"NP", // Nepal
"NL", // Netherlands
"NZ", // New Zealand
"NE", // Niger
"NG", // Nigeria
"MK", // North Macedonia
"NO", // Norway
"OM", // Oman
"PK", // Pakistan
"PW", // Palau
"PS", // Palestine
"PA", // Panama
"PG", // Papua New Guinea
"PY", // Paraguay
"PE", // Peru
"PH", // Philippines
"PL", // Poland
"PT", // Portugal
"QA", // Qatar
"RO", // Romania
"RW", // Rwanda
"KN", // Saint Kitts and Nevis
"LC", // Saint Lucia
"VC", // Saint Vincent and the Grenadines
"WS", // Samoa
"SM", // San Marino
"ST", // São Tomé and Príncipe
"SA", // Saudi Arabia
"SN", // Senegal
"RS", // Serbia
"SC", // Seychelles
"SL", // Sierra Leone
"SG", // Singapore
"SK", // Slovakia
"SI", // Slovenia
"SB", // Solomon Islands
"ZA", // South Africa
"KR", // South Korea
"ES", // Spain
"LK", // Sri Lanka
"SR", // Suriname
"SE", // Sweden
"CH", // Switzerland
"TW", // Taiwan
"TJ", // Tajikistan
"TZ", // Tanzania
"TH", // Thailand
"TL", // Timor-Leste
"TG", // Togo
"TO", // Tonga
"TT", // Trinidad and Tobago
"TN", // Tunisia
"TR", // Türkiye (Turkey)
"TM", // Turkmenistan
"TV", // Tuvalu
"UG", // Uganda
"UA", // Ukraine (except Crimea, Donetsk, and Luhansk regions)
"AE", // United Arab Emirates
"GB", // United Kingdom
"US", // United States of America
"UY", // Uruguay
"UZ", // Uzbekistan
"VU", // Vanuatu
"VA", // Vatican City
"VN", // Vietnam
"ZM", // Zambia
"ZW", // Zimbabwe
]
.into_iter()
.collect()
});

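A small illustration (hypothetical call site, not part of the diff): the lookup is a plain set-membership test on upper-case ISO 3166-1 alpha-2 codes.

#[test]
fn country_lookup_examples() {
    assert!(is_supported_country("DE")); // Germany is on the list
    assert!(!is_supported_country("XX")); // unknown codes are rejected
    assert!(!is_supported_country("de")); // lookups are case-sensitive
}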
View File

@@ -11,7 +11,6 @@ use rust_embed::RustEmbed;
#[include = "themes/**/*"]
#[exclude = "themes/src/*"]
#[include = "sounds/**/*"]
#[include = "prompts/**/*"]
#[include = "*.md"]
#[exclude = "*.DS_Store"]
pub struct Assets;

View File

@@ -12,43 +12,30 @@ workspace = true
path = "src/assistant.rs"
doctest = false
[features]
test-support = [
"editor/test-support",
"language/test-support",
"project/test-support",
"text/test-support",
]
[dependencies]
anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
assets.workspace = true
assistant_slash_command.workspace = true
async-watch.workspace = true
breadcrumbs.workspace = true
cargo_toml.workspace = true
chrono.workspace = true
client.workspace = true
clock.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
db.workspace = true
editor.workspace = true
feature_flags.workspace = true
fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
handlebars.workspace = true
heed.workspace = true
html_to_markdown.workspace = true
http_client.workspace = true
http.workspace = true
indexed_docs.workspace = true
indoc.workspace = true
language.workspace = true
language_model.workspace = true
log.workspace = true
markdown.workspace = true
menu.workspace = true
multi_buffer.workspace = true
ollama = { workspace = true, features = ["schemars"] }
@@ -57,7 +44,6 @@ ordered-float.workspace = true
parking_lot.workspace = true
paths.workspace = true
project.workspace = true
proto.workspace = true
regex.workspace = true
rope.workspace = true
schemars.workspace = true
@@ -66,29 +52,27 @@ semantic_index.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
similar.workspace = true
smol.workspace = true
strsim = "0.11"
strum.workspace = true
telemetry_events.workspace = true
terminal.workspace = true
terminal_view.workspace = true
text.workspace = true
theme.workspace = true
tiktoken-rs.workspace = true
toml.workspace = true
ui.workspace = true
util.workspace = true
uuid.workspace = true
workspace.workspace = true
picker.workspace = true
zed_actions.workspace = true
[dev-dependencies]
ctor.workspace = true
editor = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
language = { workspace = true, features = ["test-support"] }
language_model = { workspace = true, features = ["test-support"] }
log.workspace = true
project = { workspace = true, features = ["test-support"] }
rand.workspace = true
serde_json_lenient.workspace = true
text = { workspace = true, features = ["test-support"] }
unindent.workspace = true

View File

@@ -38,7 +38,7 @@ Considering these aspects will ensure our conversation view design is optimized
@nate> 2 feels like it isn't important at the moment, we can explore that later. Let's start with 4, which I think will lead us to discussion 3 and 5.
#zed share your thoughts on the points we need to consider to design a layout and visualization for a conversation view between you (#zed) and multiple people, or between multiple people and multiple bots (you and other bots).
#zed share your thoughts on the points we need to consider to design a layout and visualization for a conversation view between you (#zed) and multuple peoople, or between multiple people and multiple bots (you and other bots).
@nathan> Agreed. I'm interested in threading I think more than anything. Or 4 yeah. I think we need to scope the threading conversation. Also, asking #zed to propose the solution... not sure it will be that effective but it's worth a try...

View File

@@ -1,45 +1,41 @@
#![cfg_attr(target_os = "windows", allow(unused, dead_code))]
pub mod assistant_panel;
pub mod assistant_settings;
mod context;
pub mod context_store;
mod completion_provider;
mod context_store;
mod inline_assistant;
mod model_selector;
mod prompt_library;
mod prompts;
mod search;
mod slash_command;
mod streaming_diff;
mod terminal_inline_assistant;
pub use assistant_panel::{AssistantPanel, AssistantPanelEvent};
use assistant_settings::AssistantSettings;
use assistant_settings::{AnthropicModel, AssistantSettings, CloudModel, OllamaModel, OpenAiModel};
use assistant_slash_command::SlashCommandRegistry;
use client::{proto, Client};
use command_palette_hooks::CommandPaletteFilter;
pub use context::*;
pub use context_store::*;
use feature_flags::FeatureFlagAppExt;
pub(crate) use completion_provider::*;
pub(crate) use context_store::*;
use fs::Fs;
use gpui::{actions, impl_actions, AppContext, Global, SharedString, UpdateGlobal};
use gpui::{actions, AppContext, Global, SharedString, UpdateGlobal};
use indexed_docs::IndexedDocsRegistry;
pub(crate) use inline_assistant::*;
use language_model::{
LanguageModelId, LanguageModelProviderId, LanguageModelRegistry, LanguageModelResponseMessage,
};
pub(crate) use model_selector::*;
pub use prompts::PromptBuilder;
use semantic_index::{CloudEmbeddingProvider, SemanticIndex};
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsStore};
use settings::{Settings, SettingsStore};
use slash_command::{
active_command, default_command, diagnostics_command, docs_command, fetch_command,
file_command, now_command, project_command, prompt_command, search_command, symbols_command,
tabs_command, term_command, workflow_command,
file_command, now_command, project_command, prompt_command, search_command, tabs_command,
term_command,
};
use std::{
fmt::{self, Display},
sync::Arc,
};
use std::sync::Arc;
pub(crate) use streaming_diff::*;
use util::ResultExt;
actions!(
assistant,
@@ -50,34 +46,168 @@ actions!(
QuoteSelection,
InsertIntoEditor,
ToggleFocus,
ResetKey,
InlineAssist,
InsertActivePrompt,
ShowConfiguration,
DeployHistory,
DeployPromptLibrary,
ApplyEdit,
ConfirmCommand,
ToggleModelSelector,
DebugWorkflowSteps
ToggleModelSelector
]
);
const DEFAULT_CONTEXT_LINES: usize = 20;
#[derive(
Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize,
)]
struct MessageId(usize);
#[derive(Clone, Default, Deserialize, PartialEq)]
pub struct InlineAssist {
prompt: Option<String>,
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
User,
Assistant,
System,
}
impl_actions!(assistant, [InlineAssist]);
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct MessageId(clock::Lamport);
impl MessageId {
pub fn as_u64(self) -> u64 {
self.0.as_u64()
impl Role {
pub fn cycle(&mut self) {
*self = match self {
Role::User => Role::Assistant,
Role::Assistant => Role::System,
Role::System => Role::User,
}
}
}
impl Display for Role {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Role::User => write!(f, "user"),
Role::Assistant => write!(f, "assistant"),
Role::System => write!(f, "system"),
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub enum LanguageModel {
Cloud(CloudModel),
OpenAi(OpenAiModel),
Anthropic(AnthropicModel),
Ollama(OllamaModel),
}
impl Default for LanguageModel {
fn default() -> Self {
LanguageModel::Cloud(CloudModel::default())
}
}
impl LanguageModel {
pub fn telemetry_id(&self) -> String {
match self {
LanguageModel::OpenAi(model) => format!("openai/{}", model.id()),
LanguageModel::Anthropic(model) => format!("anthropic/{}", model.id()),
LanguageModel::Cloud(model) => format!("zed.dev/{}", model.id()),
LanguageModel::Ollama(model) => format!("ollama/{}", model.id()),
}
}
pub fn display_name(&self) -> String {
match self {
LanguageModel::OpenAi(model) => model.display_name().into(),
LanguageModel::Anthropic(model) => model.display_name().into(),
LanguageModel::Cloud(model) => model.display_name().into(),
LanguageModel::Ollama(model) => model.display_name().into(),
}
}
pub fn max_token_count(&self) -> usize {
match self {
LanguageModel::OpenAi(model) => model.max_token_count(),
LanguageModel::Anthropic(model) => model.max_token_count(),
LanguageModel::Cloud(model) => model.max_token_count(),
LanguageModel::Ollama(model) => model.max_token_count(),
}
}
pub fn id(&self) -> &str {
match self {
LanguageModel::OpenAi(model) => model.id(),
LanguageModel::Anthropic(model) => model.id(),
LanguageModel::Cloud(model) => model.id(),
LanguageModel::Ollama(model) => model.id(),
}
}
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct LanguageModelRequestMessage {
pub role: Role,
pub content: String,
}
impl LanguageModelRequestMessage {
pub fn to_proto(&self) -> proto::LanguageModelRequestMessage {
proto::LanguageModelRequestMessage {
role: match self.role {
Role::User => proto::LanguageModelRole::LanguageModelUser,
Role::Assistant => proto::LanguageModelRole::LanguageModelAssistant,
Role::System => proto::LanguageModelRole::LanguageModelSystem,
} as i32,
content: self.content.clone(),
tool_calls: Vec::new(),
tool_call_id: None,
}
}
}
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct LanguageModelRequest {
pub model: LanguageModel,
pub messages: Vec<LanguageModelRequestMessage>,
pub stop: Vec<String>,
pub temperature: f32,
}
impl LanguageModelRequest {
pub fn to_proto(&self) -> proto::CompleteWithLanguageModel {
proto::CompleteWithLanguageModel {
model: self.model.id().to_string(),
messages: self.messages.iter().map(|m| m.to_proto()).collect(),
stop: self.stop.clone(),
temperature: self.temperature,
tool_choice: None,
tools: Vec::new(),
}
}
/// Before we send the request to the server, we can perform fixups on it appropriate to the model.
pub fn preprocess(&mut self) {
match &self.model {
LanguageModel::OpenAi(_) => {}
LanguageModel::Anthropic(_) => {}
LanguageModel::Ollama(_) => {}
LanguageModel::Cloud(model) => match model {
CloudModel::Claude3Opus
| CloudModel::Claude3Sonnet
| CloudModel::Claude3Haiku
| CloudModel::Claude3_5Sonnet => {
preprocess_anthropic_request(self);
}
_ => {}
},
}
}
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct LanguageModelResponseMessage {
pub role: Option<Role>,
pub content: Option<String>,
}
#[derive(Deserialize, Debug)]
pub struct LanguageModelUsage {
pub prompt_tokens: u32,
@@ -92,48 +222,19 @@ pub struct LanguageModelChoiceDelta {
pub finish_reason: Option<String>,
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub enum MessageStatus {
#[derive(Clone, Debug, Serialize, Deserialize)]
struct MessageMetadata {
role: Role,
status: MessageStatus,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
enum MessageStatus {
Pending,
Done,
Error(SharedString),
}
impl MessageStatus {
pub fn from_proto(status: proto::ContextMessageStatus) -> MessageStatus {
match status.variant {
Some(proto::context_message_status::Variant::Pending(_)) => MessageStatus::Pending,
Some(proto::context_message_status::Variant::Done(_)) => MessageStatus::Done,
Some(proto::context_message_status::Variant::Error(error)) => {
MessageStatus::Error(error.message.into())
}
None => MessageStatus::Pending,
}
}
pub fn to_proto(&self) -> proto::ContextMessageStatus {
match self {
MessageStatus::Pending => proto::ContextMessageStatus {
variant: Some(proto::context_message_status::Variant::Pending(
proto::context_message_status::Pending {},
)),
},
MessageStatus::Done => proto::ContextMessageStatus {
variant: Some(proto::context_message_status::Variant::Done(
proto::context_message_status::Done {},
)),
},
MessageStatus::Error(message) => proto::ContextMessageStatus {
variant: Some(proto::context_message_status::Variant::Error(
proto::context_message_status::Error {
message: message.to_string(),
},
)),
},
}
}
}
/// The state pertaining to the Assistant.
#[derive(Default)]
struct Assistant {
@@ -167,20 +268,10 @@ impl Assistant {
}
}
pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) -> Arc<PromptBuilder> {
pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) {
cx.set_global(Assistant::default());
AssistantSettings::register(cx);
// TODO: remove this when 0.148.0 is released.
if AssistantSettings::get_global(cx).using_outdated_settings_version {
update_settings_file::<AssistantSettings>(fs.clone(), cx, {
let fs = fs.clone();
|content, cx| {
content.update_file(fs, cx);
}
});
}
cx.spawn(|mut cx| {
let client = client.clone();
async move {
@@ -196,29 +287,13 @@ pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) -> Arc<Pr
})
.detach();
context_store::init(&client);
prompt_library::init(cx);
init_language_model_settings(cx);
completion_provider::init(client.clone(), cx);
assistant_slash_command::init(cx);
register_slash_commands(cx);
assistant_panel::init(cx);
let prompt_builder = prompts::PromptBuilder::new(Some((fs.clone(), cx)))
.log_err()
.map(Arc::new)
.unwrap_or_else(|| Arc::new(prompts::PromptBuilder::new(None).unwrap()));
register_slash_commands(Some(prompt_builder.clone()), cx);
inline_assistant::init(
fs.clone(),
prompt_builder.clone(),
client.telemetry().clone(),
cx,
);
terminal_inline_assistant::init(
fs.clone(),
prompt_builder.clone(),
client.telemetry().clone(),
cx,
);
inline_assistant::init(fs.clone(), client.telemetry().clone(), cx);
terminal_inline_assistant::init(fs.clone(), client.telemetry().clone(), cx);
IndexedDocsRegistry::init_global(cx);
CommandPaletteFilter::update_global(cx, |filter, _cx| {
@@ -236,65 +311,22 @@ pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) -> Arc<Pr
});
})
.detach();
prompt_builder
}
fn init_language_model_settings(cx: &mut AppContext) {
update_active_language_model_from_settings(cx);
cx.observe_global::<SettingsStore>(update_active_language_model_from_settings)
.detach();
cx.subscribe(
&LanguageModelRegistry::global(cx),
|_, event: &language_model::Event, cx| match event {
language_model::Event::ProviderStateChanged
| language_model::Event::AddedProvider(_)
| language_model::Event::RemovedProvider(_) => {
update_active_language_model_from_settings(cx);
}
_ => {}
},
)
.detach();
}
fn update_active_language_model_from_settings(cx: &mut AppContext) {
let settings = AssistantSettings::get_global(cx);
let provider_name = LanguageModelProviderId::from(settings.default_model.provider.clone());
let model_id = LanguageModelId::from(settings.default_model.model.clone());
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.select_active_model(&provider_name, &model_id, cx);
});
}
fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut AppContext) {
fn register_slash_commands(cx: &mut AppContext) {
let slash_command_registry = SlashCommandRegistry::global(cx);
slash_command_registry.register_command(file_command::FileSlashCommand, true);
slash_command_registry.register_command(active_command::ActiveSlashCommand, true);
slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true);
slash_command_registry.register_command(tabs_command::TabsSlashCommand, true);
slash_command_registry.register_command(project_command::ProjectSlashCommand, true);
slash_command_registry.register_command(search_command::SearchSlashCommand, true);
slash_command_registry.register_command(prompt_command::PromptSlashCommand, true);
slash_command_registry.register_command(default_command::DefaultSlashCommand, true);
slash_command_registry.register_command(term_command::TermSlashCommand, true);
slash_command_registry.register_command(now_command::NowSlashCommand, true);
slash_command_registry.register_command(diagnostics_command::DiagnosticsSlashCommand, true);
slash_command_registry.register_command(docs_command::DocsSlashCommand, true);
if let Some(prompt_builder) = prompt_builder {
slash_command_registry.register_command(
workflow_command::WorkflowSlashCommand::new(prompt_builder),
true,
);
}
slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);
cx.observe_flag::<search_command::SearchSlashCommandFeatureFlag, _>(move |is_enabled, _cx| {
if is_enabled {
slash_command_registry.register_command(search_command::SearchSlashCommand, true);
}
})
.detach();
}
pub fn humanize_token_count(count: usize) -> String {

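A minimal sketch (illustrative, not part of the diff) of how the request types kept in this crate on one side of the diff fit together; the prompt text and temperature are example values:

use client::proto;

fn example_request() -> proto::CompleteWithLanguageModel {
    let mut request = LanguageModelRequest {
        model: LanguageModel::default(), // Cloud(CloudModel::default())
        messages: vec![LanguageModelRequestMessage {
            role: Role::User,
            content: "Summarize this file.".into(),
        }],
        stop: Vec::new(),
        temperature: 1.0,
    };
    // Model-specific fixups (e.g. the Anthropic rewrite for cloud Claude models) happen here.
    request.preprocess();
    request.to_proto()
}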
File diff suppressed because it is too large

View File

@@ -1,14 +1,154 @@
use std::sync::Arc;
use std::fmt;
use anthropic::Model as AnthropicModel;
use fs::Fs;
use gpui::{AppContext, Pixels};
use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
use ollama::Model as OllamaModel;
use open_ai::Model as OpenAiModel;
use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};
use crate::{preprocess_anthropic_request, LanguageModel, LanguageModelRequest};
pub use anthropic::Model as AnthropicModel;
use gpui::Pixels;
pub use ollama::Model as OllamaModel;
pub use open_ai::Model as OpenAiModel;
use schemars::{
schema::{InstanceType, Metadata, Schema, SchemaObject},
JsonSchema,
};
use serde::{
de::{self, Visitor},
Deserialize, Deserializer, Serialize, Serializer,
};
use settings::{Settings, SettingsSources};
use strum::{EnumIter, IntoEnumIterator};
#[derive(Clone, Debug, Default, PartialEq, EnumIter)]
pub enum CloudModel {
Gpt3Point5Turbo,
Gpt4,
Gpt4Turbo,
#[default]
Gpt4Omni,
Claude3_5Sonnet,
Claude3Opus,
Claude3Sonnet,
Claude3Haiku,
Custom(String),
}
impl Serialize for CloudModel {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(self.id())
}
}
impl<'de> Deserialize<'de> for CloudModel {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct ZedDotDevModelVisitor;
impl<'de> Visitor<'de> for ZedDotDevModelVisitor {
type Value = CloudModel;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a string for a ZedDotDevModel variant or a custom model")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
let model = CloudModel::iter()
.find(|model| model.id() == value)
.unwrap_or_else(|| CloudModel::Custom(value.to_string()));
Ok(model)
}
}
deserializer.deserialize_str(ZedDotDevModelVisitor)
}
}
impl JsonSchema for CloudModel {
fn schema_name() -> String {
"ZedDotDevModel".to_owned()
}
fn json_schema(_generator: &mut schemars::gen::SchemaGenerator) -> Schema {
let variants = CloudModel::iter()
.filter_map(|model| {
let id = model.id();
if id.is_empty() {
None
} else {
Some(id.to_string())
}
})
.collect::<Vec<_>>();
Schema::Object(SchemaObject {
instance_type: Some(InstanceType::String.into()),
enum_values: Some(variants.iter().map(|s| s.clone().into()).collect()),
metadata: Some(Box::new(Metadata {
title: Some("ZedDotDevModel".to_owned()),
default: Some(CloudModel::default().id().into()),
examples: variants.into_iter().map(Into::into).collect(),
..Default::default()
})),
..Default::default()
})
}
}
impl CloudModel {
pub fn id(&self) -> &str {
match self {
Self::Gpt3Point5Turbo => "gpt-3.5-turbo",
Self::Gpt4 => "gpt-4",
Self::Gpt4Turbo => "gpt-4-turbo-preview",
Self::Gpt4Omni => "gpt-4o",
Self::Claude3_5Sonnet => "claude-3-5-sonnet",
Self::Claude3Opus => "claude-3-opus",
Self::Claude3Sonnet => "claude-3-sonnet",
Self::Claude3Haiku => "claude-3-haiku",
Self::Custom(id) => id,
}
}
pub fn display_name(&self) -> &str {
match self {
Self::Gpt3Point5Turbo => "GPT 3.5 Turbo",
Self::Gpt4 => "GPT 4",
Self::Gpt4Turbo => "GPT 4 Turbo",
Self::Gpt4Omni => "GPT 4 Omni",
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
Self::Claude3Opus => "Claude 3 Opus",
Self::Claude3Sonnet => "Claude 3 Sonnet",
Self::Claude3Haiku => "Claude 3 Haiku",
Self::Custom(id) => id.as_str(),
}
}
pub fn max_token_count(&self) -> usize {
match self {
Self::Gpt3Point5Turbo => 2048,
Self::Gpt4 => 4096,
Self::Gpt4Turbo | Self::Gpt4Omni => 128000,
Self::Claude3_5Sonnet
| Self::Claude3Opus
| Self::Claude3Sonnet
| Self::Claude3Haiku => 200000,
Self::Custom(_) => 4096, // TODO: Make this configurable
}
}
pub fn preprocess_request(&self, request: &mut LanguageModelRequest) {
match self {
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => {
preprocess_anthropic_request(request)
}
_ => {}
}
}
}
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
@@ -19,9 +159,43 @@ pub enum AssistantDockPosition {
Bottom,
}
#[derive(Debug, PartialEq)]
pub enum AssistantProvider {
ZedDotDev {
model: CloudModel,
},
OpenAi {
model: OpenAiModel,
api_url: String,
low_speed_timeout_in_seconds: Option<u64>,
available_models: Vec<OpenAiModel>,
},
Anthropic {
model: AnthropicModel,
api_url: String,
low_speed_timeout_in_seconds: Option<u64>,
},
Ollama {
model: OllamaModel,
api_url: String,
low_speed_timeout_in_seconds: Option<u64>,
},
}
impl Default for AssistantProvider {
fn default() -> Self {
Self::OpenAi {
model: OpenAiModel::default(),
api_url: open_ai::OPEN_AI_API_URL.into(),
low_speed_timeout_in_seconds: None,
available_models: Default::default(),
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
pub enum AssistantProviderContent {
#[serde(rename = "zed.dev")]
ZedDotDev { default_model: Option<CloudModel> },
#[serde(rename = "openai")]
@@ -52,8 +226,7 @@ pub struct AssistantSettings {
pub dock: AssistantDockPosition,
pub default_width: Pixels,
pub default_height: Pixels,
pub default_model: LanguageModelSelection,
pub using_outdated_settings_version: bool,
pub provider: AssistantProvider,
}
/// Assistant panel settings
@@ -85,161 +258,34 @@ impl Default for AssistantSettingsContent {
}
impl AssistantSettingsContent {
pub fn is_version_outdated(&self) -> bool {
fn upgrade(&self) -> AssistantSettingsContentV1 {
match self {
AssistantSettingsContent::Versioned(settings) => match settings {
VersionedAssistantSettingsContent::V1(_) => true,
VersionedAssistantSettingsContent::V2(_) => false,
VersionedAssistantSettingsContent::V1(settings) => settings.clone(),
},
AssistantSettingsContent::Legacy(_) => true,
}
}
pub fn update_file(&mut self, fs: Arc<dyn Fs>, cx: &AppContext) {
if let AssistantSettingsContent::Versioned(settings) = self {
if let VersionedAssistantSettingsContent::V1(settings) = settings {
if let Some(provider) = settings.provider.clone() {
match provider {
AssistantProviderContentV1::Anthropic {
api_url,
low_speed_timeout_in_seconds,
..
} => update_settings_file::<AllLanguageModelSettings>(
fs,
cx,
move |content, _| {
if content.anthropic.is_none() {
content.anthropic =
Some(language_model::settings::AnthropicSettingsContent::Versioned(
language_model::settings::VersionedAnthropicSettingsContent::V1(
language_model::settings::AnthropicSettingsContentV1 {
api_url,
low_speed_timeout_in_seconds,
available_models: None
}
)
));
}
},
),
AssistantProviderContentV1::Ollama {
api_url,
low_speed_timeout_in_seconds,
..
} => update_settings_file::<AllLanguageModelSettings>(
fs,
cx,
move |content, _| {
if content.ollama.is_none() {
content.ollama =
Some(language_model::settings::OllamaSettingsContent {
api_url,
low_speed_timeout_in_seconds,
});
}
},
),
AssistantProviderContentV1::OpenAi {
api_url,
low_speed_timeout_in_seconds,
available_models,
..
} => update_settings_file::<AllLanguageModelSettings>(
fs,
cx,
move |content, _| {
if content.openai.is_none() {
let available_models = available_models.map(|models| {
models
.into_iter()
.filter_map(|model| match model {
open_ai::Model::Custom { name, max_tokens } => {
Some(language_model::provider::open_ai::AvailableModel { name, max_tokens })
}
_ => None,
})
.collect::<Vec<_>>()
});
content.openai =
Some(language_model::settings::OpenAiSettingsContent::Versioned(
language_model::settings::VersionedOpenAiSettingsContent::V1(
language_model::settings::OpenAiSettingsContentV1 {
api_url,
low_speed_timeout_in_seconds,
available_models
}
)
));
}
},
),
_ => {}
}
}
}
}
*self = AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
self.upgrade(),
));
}
fn upgrade(&self) -> AssistantSettingsContentV2 {
match self {
AssistantSettingsContent::Versioned(settings) => match settings {
VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
enabled: settings.enabled,
button: settings.button,
dock: settings.dock,
default_width: settings.default_width,
default_height: settings.default_width,
default_model: settings
.provider
.clone()
.and_then(|provider| match provider {
AssistantProviderContentV1::ZedDotDev { default_model } => {
default_model.map(|model| LanguageModelSelection {
provider: "zed.dev".to_string(),
model: model.id().to_string(),
})
}
AssistantProviderContentV1::OpenAi { default_model, .. } => {
default_model.map(|model| LanguageModelSelection {
provider: "openai".to_string(),
model: model.id().to_string(),
})
}
AssistantProviderContentV1::Anthropic { default_model, .. } => {
default_model.map(|model| LanguageModelSelection {
provider: "anthropic".to_string(),
model: model.id().to_string(),
})
}
AssistantProviderContentV1::Ollama { default_model, .. } => {
default_model.map(|model| LanguageModelSelection {
provider: "ollama".to_string(),
model: model.id().to_string(),
})
}
}),
},
VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
},
AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV1 {
enabled: None,
button: settings.button,
dock: settings.dock,
default_width: settings.default_width,
default_height: settings.default_height,
default_model: Some(LanguageModelSelection {
provider: "openai".to_string(),
model: settings
.default_open_ai_model
.clone()
.unwrap_or_default()
.id()
.to_string(),
}),
provider: if let Some(open_ai_api_url) = settings.openai_api_url.as_ref() {
Some(AssistantProviderContent::OpenAi {
default_model: settings.default_open_ai_model.clone(),
api_url: Some(open_ai_api_url.clone()),
low_speed_timeout_in_seconds: None,
available_models: Some(Default::default()),
})
} else {
settings.default_open_ai_model.clone().map(|open_ai_model| {
AssistantProviderContent::OpenAi {
default_model: Some(open_ai_model),
api_url: None,
low_speed_timeout_in_seconds: None,
available_models: Some(Default::default()),
}
})
},
},
}
}
@@ -250,9 +296,6 @@ impl AssistantSettingsContent {
VersionedAssistantSettingsContent::V1(settings) => {
settings.dock = Some(dock);
}
VersionedAssistantSettingsContent::V2(settings) => {
settings.dock = Some(dock);
}
},
AssistantSettingsContent::Legacy(settings) => {
settings.dock = Some(dock);
@@ -260,76 +303,74 @@ impl AssistantSettingsContent {
}
}
pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
let model = language_model.id().0.to_string();
let provider = language_model.provider_id().0.to_string();
pub fn set_model(&mut self, new_model: LanguageModel) {
match self {
AssistantSettingsContent::Versioned(settings) => match settings {
VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
"zed.dev" => {
log::warn!("attempted to set zed.dev model on outdated settings");
VersionedAssistantSettingsContent::V1(settings) => match &mut settings.provider {
Some(AssistantProviderContent::ZedDotDev {
default_model: model,
}) => {
if let LanguageModel::Cloud(new_model) = new_model {
*model = Some(new_model);
}
}
"anthropic" => {
let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
Some(AssistantProviderContentV1::Anthropic {
api_url,
low_speed_timeout_in_seconds,
..
}) => (api_url.clone(), *low_speed_timeout_in_seconds),
_ => (None, None),
};
settings.provider = Some(AssistantProviderContentV1::Anthropic {
default_model: AnthropicModel::from_id(&model).ok(),
api_url,
low_speed_timeout_in_seconds,
});
Some(AssistantProviderContent::OpenAi {
default_model: model,
..
}) => {
if let LanguageModel::OpenAi(new_model) = new_model {
*model = Some(new_model);
}
}
"ollama" => {
let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
Some(AssistantProviderContentV1::Ollama {
api_url,
low_speed_timeout_in_seconds,
..
}) => (api_url.clone(), *low_speed_timeout_in_seconds),
_ => (None, None),
};
settings.provider = Some(AssistantProviderContentV1::Ollama {
default_model: Some(ollama::Model::new(&model)),
api_url,
low_speed_timeout_in_seconds,
});
Some(AssistantProviderContent::Anthropic {
default_model: model,
..
}) => {
if let LanguageModel::Anthropic(new_model) = new_model {
*model = Some(new_model);
}
}
"openai" => {
let (api_url, low_speed_timeout_in_seconds, available_models) =
match &settings.provider {
Some(AssistantProviderContentV1::OpenAi {
api_url,
low_speed_timeout_in_seconds,
available_models,
..
}) => (
api_url.clone(),
*low_speed_timeout_in_seconds,
available_models.clone(),
),
_ => (None, None, None),
};
settings.provider = Some(AssistantProviderContentV1::OpenAi {
default_model: open_ai::Model::from_id(&model).ok(),
api_url,
low_speed_timeout_in_seconds,
available_models,
});
Some(AssistantProviderContent::Ollama {
default_model: model,
..
}) => {
if let LanguageModel::Ollama(new_model) = new_model {
*model = Some(new_model);
}
}
_ => {}
provider => match new_model {
LanguageModel::Cloud(model) => {
*provider = Some(AssistantProviderContent::ZedDotDev {
default_model: Some(model),
})
}
LanguageModel::OpenAi(model) => {
*provider = Some(AssistantProviderContent::OpenAi {
default_model: Some(model),
api_url: None,
low_speed_timeout_in_seconds: None,
available_models: Some(Default::default()),
})
}
LanguageModel::Anthropic(model) => {
*provider = Some(AssistantProviderContent::Anthropic {
default_model: Some(model),
api_url: None,
low_speed_timeout_in_seconds: None,
})
}
LanguageModel::Ollama(model) => {
*provider = Some(AssistantProviderContent::Ollama {
default_model: Some(model),
api_url: None,
low_speed_timeout_in_seconds: None,
})
}
},
},
VersionedAssistantSettingsContent::V2(settings) => {
settings.default_model = Some(LanguageModelSelection { provider, model });
}
},
AssistantSettingsContent::Legacy(settings) => {
if let Ok(model) = open_ai::Model::from_id(&language_model.id().0) {
if let LanguageModel::OpenAi(model) = new_model {
settings.default_open_ai_model = Some(model);
}
}
@@ -342,80 +383,21 @@ impl AssistantSettingsContent {
pub enum VersionedAssistantSettingsContent {
#[serde(rename = "1")]
V1(AssistantSettingsContentV1),
#[serde(rename = "2")]
V2(AssistantSettingsContentV2),
}
impl Default for VersionedAssistantSettingsContent {
fn default() -> Self {
Self::V2(AssistantSettingsContentV2 {
Self::V1(AssistantSettingsContentV1 {
enabled: None,
button: None,
dock: None,
default_width: None,
default_height: None,
default_model: None,
provider: None,
})
}
}
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
/// Whether the Assistant is enabled.
///
/// Default: true
enabled: Option<bool>,
/// Whether to show the assistant panel button in the status bar.
///
/// Default: true
button: Option<bool>,
/// Where to dock the assistant.
///
/// Default: right
dock: Option<AssistantDockPosition>,
/// Default width in pixels when the assistant is docked to the left or right.
///
/// Default: 640
default_width: Option<f32>,
/// Default height in pixels when the assistant is docked to the bottom.
///
/// Default: 320
default_height: Option<f32>,
/// The default model to use when creating new contexts.
default_model: Option<LanguageModelSelection>,
}
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
#[schemars(schema_with = "providers_schema")]
pub provider: String,
pub model: String,
}
fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
schemars::schema::SchemaObject {
enum_values: Some(vec![
"anthropic".into(),
"google".into(),
"ollama".into(),
"openai".into(),
"zed.dev".into(),
"copilot_chat".into(),
]),
..Default::default()
}
.into()
}
impl Default for LanguageModelSelection {
fn default() -> Self {
Self {
provider: "openai".to_string(),
model: "gpt-4".to_string(),
}
}
}
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
/// Whether the Assistant is enabled.
@@ -440,9 +422,9 @@ pub struct AssistantSettingsContentV1 {
default_height: Option<f32>,
/// The provider of the assistant service.
///
/// This can be "openai", "anthropic", "ollama", "zed.dev"
/// This can either be the internal `zed.dev` service or an external `openai` service,
/// each with their respective default models and configurations.
provider: Option<AssistantProviderContentV1>,
provider: Option<AssistantProviderContent>,
}
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
@@ -476,8 +458,6 @@ pub struct LegacyAssistantSettingsContent {
impl Settings for AssistantSettings {
const KEY: Option<&'static str> = Some("assistant");
const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
type FileContent = AssistantSettingsContent;
fn load(
@@ -487,10 +467,6 @@ impl Settings for AssistantSettings {
let mut settings = AssistantSettings::default();
for value in sources.defaults_and_customizations() {
if value.is_version_outdated() {
settings.using_outdated_settings_version = true;
}
let value = value.upgrade();
merge(&mut settings.enabled, value.enabled);
merge(&mut settings.button, value.button);
@@ -503,10 +479,123 @@ impl Settings for AssistantSettings {
&mut settings.default_height,
value.default_height.map(Into::into),
);
merge(
&mut settings.default_model,
value.default_model.map(Into::into),
);
if let Some(provider) = value.provider.clone() {
match (&mut settings.provider, provider) {
(
AssistantProvider::ZedDotDev { model },
AssistantProviderContent::ZedDotDev {
default_model: model_override,
},
) => {
merge(model, model_override);
}
(
AssistantProvider::OpenAi {
model,
api_url,
low_speed_timeout_in_seconds,
available_models,
},
AssistantProviderContent::OpenAi {
default_model: model_override,
api_url: api_url_override,
low_speed_timeout_in_seconds: low_speed_timeout_in_seconds_override,
available_models: available_models_override,
},
) => {
merge(model, model_override);
merge(api_url, api_url_override);
merge(available_models, available_models_override);
if let Some(low_speed_timeout_in_seconds_override) =
low_speed_timeout_in_seconds_override
{
*low_speed_timeout_in_seconds =
Some(low_speed_timeout_in_seconds_override);
}
}
(
AssistantProvider::Ollama {
model,
api_url,
low_speed_timeout_in_seconds,
},
AssistantProviderContent::Ollama {
default_model: model_override,
api_url: api_url_override,
low_speed_timeout_in_seconds: low_speed_timeout_in_seconds_override,
},
) => {
merge(model, model_override);
merge(api_url, api_url_override);
if let Some(low_speed_timeout_in_seconds_override) =
low_speed_timeout_in_seconds_override
{
*low_speed_timeout_in_seconds =
Some(low_speed_timeout_in_seconds_override);
}
}
(
AssistantProvider::Anthropic {
model,
api_url,
low_speed_timeout_in_seconds,
},
AssistantProviderContent::Anthropic {
default_model: model_override,
api_url: api_url_override,
low_speed_timeout_in_seconds: low_speed_timeout_in_seconds_override,
},
) => {
merge(model, model_override);
merge(api_url, api_url_override);
if let Some(low_speed_timeout_in_seconds_override) =
low_speed_timeout_in_seconds_override
{
*low_speed_timeout_in_seconds =
Some(low_speed_timeout_in_seconds_override);
}
}
(provider, provider_override) => {
*provider = match provider_override {
AssistantProviderContent::ZedDotDev {
default_model: model,
} => AssistantProvider::ZedDotDev {
model: model.unwrap_or_default(),
},
AssistantProviderContent::OpenAi {
default_model: model,
api_url,
low_speed_timeout_in_seconds,
available_models,
} => AssistantProvider::OpenAi {
model: model.unwrap_or_default(),
api_url: api_url.unwrap_or_else(|| open_ai::OPEN_AI_API_URL.into()),
low_speed_timeout_in_seconds,
available_models: available_models.unwrap_or_default(),
},
AssistantProviderContent::Anthropic {
default_model: model,
api_url,
low_speed_timeout_in_seconds,
} => AssistantProvider::Anthropic {
model: model.unwrap_or_default(),
api_url: api_url
.unwrap_or_else(|| anthropic::ANTHROPIC_API_URL.into()),
low_speed_timeout_in_seconds,
},
AssistantProviderContent::Ollama {
default_model: model,
api_url,
low_speed_timeout_in_seconds,
} => AssistantProvider::Ollama {
model: model.unwrap_or_default(),
api_url: api_url.unwrap_or_else(|| ollama::OLLAMA_API_URL.into()),
low_speed_timeout_in_seconds,
},
};
}
}
}
}
Ok(settings)
@@ -521,68 +610,94 @@ fn merge<T>(target: &mut T, value: Option<T>) {
#[cfg(test)]
mod tests {
use gpui::{ReadGlobal, TestAppContext};
use gpui::{AppContext, UpdateGlobal};
use settings::SettingsStore;
use super::*;
#[gpui::test]
async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
let fs = fs::FakeFs::new(cx.executor().clone());
fs.create_dir(paths::settings_file().parent().unwrap())
.await
.unwrap();
fn test_deserialize_assistant_settings(cx: &mut AppContext) {
let store = settings::SettingsStore::test(cx);
cx.set_global(store);
cx.update(|cx| {
let test_settings = settings::SettingsStore::test(cx);
cx.set_global(test_settings);
AssistantSettings::register(cx);
// Settings default to gpt-4o.
AssistantSettings::register(cx);
assert_eq!(
AssistantSettings::get_global(cx).provider,
AssistantProvider::OpenAi {
model: OpenAiModel::FourOmni,
api_url: open_ai::OPEN_AI_API_URL.into(),
low_speed_timeout_in_seconds: None,
available_models: Default::default(),
}
);
// Ensure backward-compatibility.
SettingsStore::update_global(cx, |store, cx| {
store
.set_user_settings(
r#"{
"assistant": {
"openai_api_url": "test-url",
}
}"#,
cx,
)
.unwrap();
});
cx.update(|cx| {
assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
assert_eq!(
AssistantSettings::get_global(cx).default_model,
LanguageModelSelection {
provider: "openai".into(),
model: "gpt-4o".into(),
}
);
assert_eq!(
AssistantSettings::get_global(cx).provider,
AssistantProvider::OpenAi {
model: OpenAiModel::FourOmni,
api_url: "test-url".into(),
low_speed_timeout_in_seconds: None,
available_models: Default::default(),
}
);
SettingsStore::update_global(cx, |store, cx| {
store
.set_user_settings(
r#"{
"assistant": {
"default_open_ai_model": "gpt-4-0613"
}
}"#,
cx,
)
.unwrap();
});
assert_eq!(
AssistantSettings::get_global(cx).provider,
AssistantProvider::OpenAi {
model: OpenAiModel::Four,
api_url: open_ai::OPEN_AI_API_URL.into(),
low_speed_timeout_in_seconds: None,
available_models: Default::default(),
}
);
cx.update(|cx| {
settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
fs.clone(),
|settings, _| {
*settings = AssistantSettingsContent::Versioned(
VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
default_model: Some(LanguageModelSelection {
provider: "test-provider".into(),
model: "gpt-99".into(),
}),
enabled: None,
button: None,
dock: None,
default_width: None,
default_height: None,
}),
)
},
);
// The new version supports setting a custom model when using zed.dev.
SettingsStore::update_global(cx, |store, cx| {
store
.set_user_settings(
r#"{
"assistant": {
"version": "1",
"provider": {
"name": "zed.dev",
"default_model": "custom"
}
}
}"#,
cx,
)
.unwrap();
});
cx.run_until_parked();
let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
assert!(raw_settings_value.contains(r#""version": "2""#));
#[derive(Debug, Deserialize)]
struct AssistantSettingsTest {
assistant: AssistantSettingsContent,
}
let assistant_settings: AssistantSettingsTest =
serde_json_lenient::from_str(&raw_settings_value).unwrap();
assert!(!assistant_settings.assistant.is_version_outdated());
assert_eq!(
AssistantSettings::get_global(cx).provider,
AssistantProvider::ZedDotDev {
model: CloudModel::Custom("custom".into())
}
);
}
}

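For orientation (illustrative values, not part of the diff), the two on-disk formats this upgrade path converts between differ roughly as follows: v1 nests the default model under a named provider, while v2 records a flat provider/model selection. Shown as raw strings in the style of the tests above; the exact field values are assumptions:

const SETTINGS_V1: &str = r#"{"assistant": {"version": "1", "provider": {"name": "openai", "default_model": "gpt-4o"}}}"#;
const SETTINGS_V2: &str = r#"{"assistant": {"version": "2", "default_model": {"provider": "openai", "model": "gpt-4o"}}}"#;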
View File

@@ -0,0 +1,373 @@
mod anthropic;
mod cloud;
#[cfg(test)]
mod fake;
mod ollama;
mod open_ai;
pub use anthropic::*;
pub use cloud::*;
#[cfg(test)]
pub use fake::*;
pub use ollama::*;
pub use open_ai::*;
use parking_lot::RwLock;
use smol::lock::{Semaphore, SemaphoreGuardArc};
use crate::{
assistant_settings::{AssistantProvider, AssistantSettings},
LanguageModel, LanguageModelRequest,
};
use anyhow::Result;
use client::Client;
use futures::{future::BoxFuture, stream::BoxStream};
use gpui::{AnyView, AppContext, BorrowAppContext, Task, WindowContext};
use settings::{Settings, SettingsStore};
use std::time::Duration;
use std::{any::Any, sync::Arc};
/// Chooses which model to use for the OpenAI provider.
/// If the requested model is unavailable, falls back to the first available model, or to the original model when none are available.
fn choose_openai_model(
model: &::open_ai::Model,
available_models: &[::open_ai::Model],
) -> ::open_ai::Model {
available_models
.iter()
.find(|&m| m == model)
.or_else(|| available_models.first())
.unwrap_or_else(|| model)
.clone()
}
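// A minimal illustration of the fallback above (hypothetical model names): if
// `available_models` is ["gpt-4o", "gpt-4"] and `model` is "gpt-3.5-turbo", the
// first available model ("gpt-4o") is returned; with an empty `available_models`
// list, the original `model` is returned unchanged.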
pub fn init(client: Arc<Client>, cx: &mut AppContext) {
let provider = create_provider_from_settings(client.clone(), 0, cx);
cx.set_global(CompletionProvider::new(provider, Some(client)));
let mut settings_version = 0;
cx.observe_global::<SettingsStore>(move |cx| {
settings_version += 1;
cx.update_global::<CompletionProvider, _>(|provider, cx| {
provider.update_settings(settings_version, cx);
})
})
.detach();
}
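/// A streamed completion plus the semaphore permit acquired for it. Keeping
/// `_lock` alive until the response is dropped is what caps the number of
/// concurrent requests at `MAX_CONCURRENT_COMPLETION_REQUESTS`.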
pub struct CompletionResponse {
pub inner: BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>,
_lock: SemaphoreGuardArc,
}
pub trait LanguageModelCompletionProvider: Send + Sync {
fn available_models(&self, cx: &AppContext) -> Vec<LanguageModel>;
fn settings_version(&self) -> usize;
fn is_authenticated(&self) -> bool;
fn authenticate(&self, cx: &AppContext) -> Task<Result<()>>;
fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView;
fn reset_credentials(&self, cx: &AppContext) -> Task<Result<()>>;
fn model(&self) -> LanguageModel;
fn count_tokens(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>>;
fn complete(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>;
fn as_any_mut(&mut self) -> &mut dyn Any;
}
const MAX_CONCURRENT_COMPLETION_REQUESTS: usize = 4;
pub struct CompletionProvider {
provider: Arc<RwLock<dyn LanguageModelCompletionProvider>>,
client: Option<Arc<Client>>,
request_limiter: Arc<Semaphore>,
}
impl CompletionProvider {
pub fn new(
provider: Arc<RwLock<dyn LanguageModelCompletionProvider>>,
client: Option<Arc<Client>>,
) -> Self {
Self {
provider,
client,
request_limiter: Arc::new(Semaphore::new(MAX_CONCURRENT_COMPLETION_REQUESTS)),
}
}
pub fn available_models(&self, cx: &AppContext) -> Vec<LanguageModel> {
self.provider.read().available_models(cx)
}
pub fn settings_version(&self) -> usize {
self.provider.read().settings_version()
}
pub fn is_authenticated(&self) -> bool {
self.provider.read().is_authenticated()
}
pub fn authenticate(&self, cx: &AppContext) -> Task<Result<()>> {
self.provider.read().authenticate(cx)
}
pub fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView {
self.provider.read().authentication_prompt(cx)
}
pub fn reset_credentials(&self, cx: &AppContext) -> Task<Result<()>> {
self.provider.read().reset_credentials(cx)
}
pub fn model(&self) -> LanguageModel {
self.provider.read().model()
}
pub fn count_tokens(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
self.provider.read().count_tokens(request, cx)
}
pub fn complete(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> Task<CompletionResponse> {
let rate_limiter = self.request_limiter.clone();
let provider = self.provider.clone();
cx.background_executor().spawn(async move {
let lock = rate_limiter.acquire_arc().await;
let response = provider.read().complete(request);
CompletionResponse {
inner: response,
_lock: lock,
}
})
}
}
impl gpui::Global for CompletionProvider {}
impl CompletionProvider {
pub fn global(cx: &AppContext) -> &Self {
cx.global::<Self>()
}
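/// Runs `update` against the active provider if its concrete type is `T`;
/// returns `None` without calling `update` when a different provider is active.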
pub fn update_current_as<R, T: LanguageModelCompletionProvider + 'static>(
&mut self,
update: impl FnOnce(&mut T) -> R,
) -> Option<R> {
let mut provider = self.provider.write();
if let Some(provider) = provider.as_any_mut().downcast_mut::<T>() {
Some(update(provider))
} else {
None
}
}
pub fn update_settings(&mut self, version: usize, cx: &mut AppContext) {
let updated = match &AssistantSettings::get_global(cx).provider {
AssistantProvider::ZedDotDev { model } => self
.update_current_as::<_, CloudCompletionProvider>(|provider| {
provider.update(model.clone(), version);
}),
AssistantProvider::OpenAi {
model,
api_url,
low_speed_timeout_in_seconds,
available_models,
} => self.update_current_as::<_, OpenAiCompletionProvider>(|provider| {
provider.update(
choose_openai_model(&model, &available_models),
api_url.clone(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
version,
);
}),
AssistantProvider::Anthropic {
model,
api_url,
low_speed_timeout_in_seconds,
} => self.update_current_as::<_, AnthropicCompletionProvider>(|provider| {
provider.update(
model.clone(),
api_url.clone(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
version,
);
}),
AssistantProvider::Ollama {
model,
api_url,
low_speed_timeout_in_seconds,
} => self.update_current_as::<_, OllamaCompletionProvider>(|provider| {
provider.update(
model.clone(),
api_url.clone(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
version,
cx,
);
}),
};
// The previously configured provider was switched to a different kind, so recreate it from the current settings.
if updated.is_none() {
if let Some(client) = self.client.clone() {
self.provider = create_provider_from_settings(client, version, cx);
} else {
log::warn!("completion provider cannot be created because client is not set");
}
}
}
}
fn create_provider_from_settings(
client: Arc<Client>,
settings_version: usize,
cx: &mut AppContext,
) -> Arc<RwLock<dyn LanguageModelCompletionProvider>> {
match &AssistantSettings::get_global(cx).provider {
AssistantProvider::ZedDotDev { model } => Arc::new(RwLock::new(
CloudCompletionProvider::new(model.clone(), client.clone(), settings_version, cx),
)),
AssistantProvider::OpenAi {
model,
api_url,
low_speed_timeout_in_seconds,
available_models,
} => Arc::new(RwLock::new(OpenAiCompletionProvider::new(
choose_openai_model(&model, &available_models),
api_url.clone(),
client.http_client(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
settings_version,
))),
AssistantProvider::Anthropic {
model,
api_url,
low_speed_timeout_in_seconds,
} => Arc::new(RwLock::new(AnthropicCompletionProvider::new(
model.clone(),
api_url.clone(),
client.http_client(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
settings_version,
))),
AssistantProvider::Ollama {
model,
api_url,
low_speed_timeout_in_seconds,
} => Arc::new(RwLock::new(OllamaCompletionProvider::new(
model.clone(),
api_url.clone(),
client.http_client(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
settings_version,
cx,
))),
}
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use gpui::AppContext;
use parking_lot::RwLock;
use settings::SettingsStore;
use smol::stream::StreamExt;
use crate::{
completion_provider::MAX_CONCURRENT_COMPLETION_REQUESTS, CompletionProvider,
FakeCompletionProvider, LanguageModelRequest,
};
#[gpui::test]
fn test_rate_limiting(cx: &mut AppContext) {
SettingsStore::test(cx);
let fake_provider = FakeCompletionProvider::setup_test(cx);
let provider = CompletionProvider::new(Arc::new(RwLock::new(fake_provider.clone())), None);
// Enqueue some requests
for i in 0..MAX_CONCURRENT_COMPLETION_REQUESTS * 2 {
let response = provider.complete(
LanguageModelRequest {
temperature: i as f32 / 10.0,
..Default::default()
},
cx,
);
cx.background_executor()
.spawn(async move {
let response = response.await;
let mut stream = response.inner.await.unwrap();
while let Some(message) = stream.next().await {
message.unwrap();
}
})
.detach();
}
cx.background_executor().run_until_parked();
assert_eq!(
fake_provider.completion_count(),
MAX_CONCURRENT_COMPLETION_REQUESTS
);
// Get the first completion request that is in flight and mark it as completed.
let completion = fake_provider
.running_completions()
.into_iter()
.next()
.unwrap();
fake_provider.finish_completion(&completion);
// Ensure that the number of in-flight completion requests is reduced.
assert_eq!(
fake_provider.completion_count(),
MAX_CONCURRENT_COMPLETION_REQUESTS - 1
);
cx.background_executor().run_until_parked();
// Ensure that another completion request was allowed to acquire the lock.
assert_eq!(
fake_provider.completion_count(),
MAX_CONCURRENT_COMPLETION_REQUESTS
);
// Mark all in-flight completion requests as finished.
for request in fake_provider.running_completions() {
fake_provider.finish_completion(&request);
}
assert_eq!(fake_provider.completion_count(), 0);
// Wait until the background tasks acquire the lock again.
cx.background_executor().run_until_parked();
assert_eq!(
fake_provider.completion_count(),
MAX_CONCURRENT_COMPLETION_REQUESTS - 1
);
// Finish all remaining completion requests.
for request in fake_provider.running_completions() {
fake_provider.finish_completion(&request);
}
cx.background_executor().run_until_parked();
assert_eq!(fake_provider.completion_count(), 0);
}
}

View File

@@ -0,0 +1,367 @@
use crate::{
assistant_settings::AnthropicModel, CompletionProvider, LanguageModel, LanguageModelRequest,
Role,
};
use crate::{count_open_ai_tokens, LanguageModelCompletionProvider, LanguageModelRequestMessage};
use anthropic::{stream_completion, Request, RequestMessage};
use anyhow::{anyhow, Result};
use editor::{Editor, EditorElement, EditorStyle};
use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
use gpui::{AnyView, AppContext, FontStyle, Task, TextStyle, View, WhiteSpace};
use http::HttpClient;
use settings::Settings;
use std::time::Duration;
use std::{env, sync::Arc};
use strum::IntoEnumIterator;
use theme::ThemeSettings;
use ui::prelude::*;
use util::ResultExt;
pub struct AnthropicCompletionProvider {
api_key: Option<String>,
api_url: String,
model: AnthropicModel,
http_client: Arc<dyn HttpClient>,
low_speed_timeout: Option<Duration>,
settings_version: usize,
}
impl LanguageModelCompletionProvider for AnthropicCompletionProvider {
fn available_models(&self, _cx: &AppContext) -> Vec<LanguageModel> {
AnthropicModel::iter()
.map(LanguageModel::Anthropic)
.collect()
}
fn settings_version(&self) -> usize {
self.settings_version
}
fn is_authenticated(&self) -> bool {
self.api_key.is_some()
}
fn authenticate(&self, cx: &AppContext) -> Task<Result<()>> {
if self.is_authenticated() {
Task::ready(Ok(()))
} else {
let api_url = self.api_url.clone();
cx.spawn(|mut cx| async move {
let api_key = if let Ok(api_key) = env::var("ANTHROPIC_API_KEY") {
api_key
} else {
let (_, api_key) = cx
.update(|cx| cx.read_credentials(&api_url))?
.await?
.ok_or_else(|| anyhow!("credentials not found"))?;
String::from_utf8(api_key)?
};
cx.update_global::<CompletionProvider, _>(|provider, _cx| {
provider.update_current_as::<_, AnthropicCompletionProvider>(|provider| {
provider.api_key = Some(api_key);
});
})
})
}
}
fn reset_credentials(&self, cx: &AppContext) -> Task<Result<()>> {
let delete_credentials = cx.delete_credentials(&self.api_url);
cx.spawn(|mut cx| async move {
delete_credentials.await.log_err();
cx.update_global::<CompletionProvider, _>(|provider, _cx| {
provider.update_current_as::<_, AnthropicCompletionProvider>(|provider| {
provider.api_key = None;
});
})
})
}
fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView {
cx.new_view(|cx| AuthenticationPrompt::new(self.api_url.clone(), cx))
.into()
}
fn model(&self) -> LanguageModel {
LanguageModel::Anthropic(self.model.clone())
}
fn count_tokens(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
count_open_ai_tokens(request, cx.background_executor())
}
fn complete(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
let request = self.to_anthropic_request(request);
let http_client = self.http_client.clone();
let api_key = self.api_key.clone();
let api_url = self.api_url.clone();
let low_speed_timeout = self.low_speed_timeout;
async move {
let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?;
let request = stream_completion(
http_client.as_ref(),
&api_url,
&api_key,
request,
low_speed_timeout,
);
let response = request.await?;
let stream = response
.filter_map(|response| async move {
match response {
Ok(response) => match response {
anthropic::ResponseEvent::ContentBlockStart {
content_block, ..
} => match content_block {
anthropic::ContentBlock::Text { text } => Some(Ok(text)),
},
anthropic::ResponseEvent::ContentBlockDelta { delta, .. } => {
match delta {
anthropic::TextDelta::TextDelta { text } => Some(Ok(text)),
}
}
_ => None,
},
Err(error) => Some(Err(error)),
}
})
.boxed();
Ok(stream)
}
.boxed()
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
}
impl AnthropicCompletionProvider {
pub fn new(
model: AnthropicModel,
api_url: String,
http_client: Arc<dyn HttpClient>,
low_speed_timeout: Option<Duration>,
settings_version: usize,
) -> Self {
Self {
api_key: None,
api_url,
model,
http_client,
low_speed_timeout,
settings_version,
}
}
pub fn update(
&mut self,
model: AnthropicModel,
api_url: String,
low_speed_timeout: Option<Duration>,
settings_version: usize,
) {
self.model = model;
self.api_url = api_url;
self.low_speed_timeout = low_speed_timeout;
self.settings_version = settings_version;
}
fn to_anthropic_request(&self, mut request: LanguageModelRequest) -> Request {
preprocess_anthropic_request(&mut request);
let model = match request.model {
LanguageModel::Anthropic(model) => model,
_ => self.model.clone(),
};
let mut system_message = String::new();
if request
.messages
.first()
.map_or(false, |message| message.role == Role::System)
{
system_message = request.messages.remove(0).content;
}
Request {
model,
messages: request
.messages
.iter()
.map(|msg| RequestMessage {
role: match msg.role {
Role::User => anthropic::Role::User,
Role::Assistant => anthropic::Role::Assistant,
Role::System => unreachable!("filtered out by preprocess_request"),
},
content: msg.content.clone(),
})
.collect(),
stream: true,
system: system_message,
max_tokens: 4092,
}
}
}
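// Collapses consecutive messages that share a role and gathers all system
// messages into a single system message at the front, to suit the Anthropic
// API's alternating user/assistant turns. For example, [System("a"), User("x"),
// User("y")] becomes [System("a"), User("x\n\ny")].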
pub fn preprocess_anthropic_request(request: &mut LanguageModelRequest) {
let mut new_messages: Vec<LanguageModelRequestMessage> = Vec::new();
let mut system_message = String::new();
for message in request.messages.drain(..) {
if message.content.is_empty() {
continue;
}
match message.role {
Role::User | Role::Assistant => {
if let Some(last_message) = new_messages.last_mut() {
if last_message.role == message.role {
last_message.content.push_str("\n\n");
last_message.content.push_str(&message.content);
continue;
}
}
new_messages.push(message);
}
Role::System => {
if !system_message.is_empty() {
system_message.push_str("\n\n");
}
system_message.push_str(&message.content);
}
}
}
if !system_message.is_empty() {
new_messages.insert(
0,
LanguageModelRequestMessage {
role: Role::System,
content: system_message,
},
);
}
request.messages = new_messages;
}
struct AuthenticationPrompt {
api_key: View<Editor>,
api_url: String,
}
impl AuthenticationPrompt {
fn new(api_url: String, cx: &mut WindowContext) -> Self {
Self {
api_key: cx.new_view(|cx| {
let mut editor = Editor::single_line(cx);
editor.set_placeholder_text(
"sk-000000000000000000000000000000000000000000000000",
cx,
);
editor
}),
api_url,
}
}
fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
let api_key = self.api_key.read(cx).text(cx);
if api_key.is_empty() {
return;
}
let write_credentials = cx.write_credentials(&self.api_url, "Bearer", api_key.as_bytes());
cx.spawn(|_, mut cx| async move {
write_credentials.await?;
cx.update_global::<CompletionProvider, _>(|provider, _cx| {
provider.update_current_as::<_, AnthropicCompletionProvider>(|provider| {
provider.api_key = Some(api_key);
});
})
})
.detach_and_log_err(cx);
}
fn render_api_key_editor(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
let settings = ThemeSettings::get_global(cx);
let text_style = TextStyle {
color: cx.theme().colors().text,
font_family: settings.ui_font.family.clone(),
font_features: settings.ui_font.features.clone(),
font_size: rems(0.875).into(),
font_weight: settings.ui_font.weight,
font_style: FontStyle::Normal,
line_height: relative(1.3),
background_color: None,
underline: None,
strikethrough: None,
white_space: WhiteSpace::Normal,
};
EditorElement::new(
&self.api_key,
EditorStyle {
background: cx.theme().colors().editor_background,
local_player: cx.theme().players().local(),
text: text_style,
..Default::default()
},
)
}
}
impl Render for AuthenticationPrompt {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
const INSTRUCTIONS: [&str; 4] = [
"To use the assistant panel or inline assistant, you need to add your Anthropic API key.",
"You can create an API key at: https://console.anthropic.com/settings/keys",
"",
"Paste your Anthropic API key below and hit enter to use the assistant:",
];
v_flex()
.p_4()
.size_full()
.on_action(cx.listener(Self::save_api_key))
.children(
INSTRUCTIONS.map(|instruction| Label::new(instruction).size(LabelSize::Small)),
)
.child(
h_flex()
.w_full()
.my_2()
.px_2()
.py_1()
.bg(cx.theme().colors().editor_background)
.rounded_md()
.child(self.render_api_key_editor(cx)),
)
.child(
Label::new(
"You can also assign the ANTHROPIC_API_KEY environment variable and restart Zed.",
)
.size(LabelSize::Small),
)
.child(
h_flex()
.gap_2()
.child(Label::new("Click on").size(LabelSize::Small))
.child(Icon::new(IconName::ZedAssistant).size(IconSize::XSmall))
.child(
Label::new("in the status bar to close this panel.").size(LabelSize::Small),
),
)
.into_any()
}
}

View File

@@ -0,0 +1,208 @@
use crate::{
assistant_settings::CloudModel, count_open_ai_tokens, CompletionProvider, LanguageModel,
LanguageModelCompletionProvider, LanguageModelRequest,
};
use anyhow::{anyhow, Result};
use client::{proto, Client};
use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt, TryFutureExt};
use gpui::{AnyView, AppContext, Task};
use std::{future, sync::Arc};
use strum::IntoEnumIterator;
use ui::prelude::*;
pub struct CloudCompletionProvider {
client: Arc<Client>,
model: CloudModel,
settings_version: usize,
status: client::Status,
_maintain_client_status: Task<()>,
}
impl CloudCompletionProvider {
pub fn new(
model: CloudModel,
client: Arc<Client>,
settings_version: usize,
cx: &mut AppContext,
) -> Self {
let mut status_rx = client.status();
let status = *status_rx.borrow();
let maintain_client_status = cx.spawn(|mut cx| async move {
while let Some(status) = status_rx.next().await {
let _ = cx.update_global::<CompletionProvider, _>(|provider, _cx| {
provider.update_current_as::<_, Self>(|provider| {
provider.status = status;
});
});
}
});
Self {
client,
model,
settings_version,
status,
_maintain_client_status: maintain_client_status,
}
}
pub fn update(&mut self, model: CloudModel, settings_version: usize) {
self.model = model;
self.settings_version = settings_version;
}
}
impl LanguageModelCompletionProvider for CloudCompletionProvider {
fn available_models(&self, _cx: &AppContext) -> Vec<LanguageModel> {
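// The configured model may be a custom one; substitute it for the placeholder
// `Custom` variant below and drop that variant when no custom model is set.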
let mut custom_model = if let CloudModel::Custom(custom_model) = self.model.clone() {
Some(custom_model)
} else {
None
};
CloudModel::iter()
.filter_map(move |model| {
if let CloudModel::Custom(_) = model {
Some(CloudModel::Custom(custom_model.take()?))
} else {
Some(model)
}
})
.map(LanguageModel::Cloud)
.collect()
}
fn settings_version(&self) -> usize {
self.settings_version
}
fn is_authenticated(&self) -> bool {
self.status.is_connected()
}
fn authenticate(&self, cx: &AppContext) -> Task<Result<()>> {
let client = self.client.clone();
cx.spawn(move |cx| async move { client.authenticate_and_connect(true, &cx).await })
}
fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView {
cx.new_view(|_cx| AuthenticationPrompt).into()
}
fn reset_credentials(&self, _cx: &AppContext) -> Task<Result<()>> {
Task::ready(Ok(()))
}
fn model(&self) -> LanguageModel {
LanguageModel::Cloud(self.model.clone())
}
fn count_tokens(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
match request.model {
LanguageModel::Cloud(CloudModel::Gpt4)
| LanguageModel::Cloud(CloudModel::Gpt4Turbo)
| LanguageModel::Cloud(CloudModel::Gpt4Omni)
| LanguageModel::Cloud(CloudModel::Gpt3Point5Turbo) => {
count_open_ai_tokens(request, cx.background_executor())
}
LanguageModel::Cloud(
CloudModel::Claude3_5Sonnet
| CloudModel::Claude3Opus
| CloudModel::Claude3Sonnet
| CloudModel::Claude3Haiku,
) => {
// Can't find a tokenizer for Claude 3, so for now just use the same as OpenAI's as an approximation.
count_open_ai_tokens(request, cx.background_executor())
}
LanguageModel::Cloud(CloudModel::Custom(model)) => {
let request = self.client.request(proto::CountTokensWithLanguageModel {
model,
messages: request
.messages
.iter()
.map(|message| message.to_proto())
.collect(),
});
async move {
let response = request.await?;
Ok(response.token_count as usize)
}
.boxed()
}
_ => future::ready(Err(anyhow!("invalid model"))).boxed(),
}
}
fn complete(
&self,
mut request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
request.preprocess();
let request = proto::CompleteWithLanguageModel {
model: request.model.id().to_string(),
messages: request
.messages
.iter()
.map(|message| message.to_proto())
.collect(),
stop: request.stop,
temperature: request.temperature,
tools: Vec::new(),
tool_choice: None,
};
self.client
.request_stream(request)
.map_ok(|stream| {
stream
.filter_map(|response| async move {
match response {
Ok(mut response) => Some(Ok(response.choices.pop()?.delta?.content?)),
Err(error) => Some(Err(error)),
}
})
.boxed()
})
.boxed()
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
}
struct AuthenticationPrompt;
impl Render for AuthenticationPrompt {
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
const LABEL: &str = "Generate and analyze code with language models. You can dialog with the assistant in this panel or transform code inline.";
v_flex().gap_6().p_4().child(Label::new(LABEL)).child(
v_flex()
.gap_2()
.child(
Button::new("sign_in", "Sign in")
.icon_color(Color::Muted)
.icon(IconName::Github)
.icon_position(IconPosition::Start)
.style(ButtonStyle::Filled)
.full_width()
.on_click(|_, cx| {
CompletionProvider::global(cx)
.authenticate(cx)
.detach_and_log_err(cx);
}),
)
.child(
div().flex().w_full().items_center().child(
Label::new("Sign in to enable collaboration.")
.color(Color::Muted)
.size(LabelSize::Small),
),
),
)
}
}

View File

@@ -0,0 +1,107 @@
use anyhow::Result;
use collections::HashMap;
use futures::{channel::mpsc, future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
use gpui::{AnyView, AppContext, Task};
use std::sync::Arc;
use ui::WindowContext;
use crate::{LanguageModel, LanguageModelCompletionProvider, LanguageModelRequest};
#[derive(Clone, Default)]
pub struct FakeCompletionProvider {
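// In-flight completions, keyed by the JSON-serialized request so tests can
// target a specific request when streaming chunks or finishing it.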
current_completion_txs: Arc<parking_lot::Mutex<HashMap<String, mpsc::UnboundedSender<String>>>>,
}
impl FakeCompletionProvider {
#[cfg(test)]
pub fn setup_test(cx: &mut AppContext) -> Self {
use crate::CompletionProvider;
use parking_lot::RwLock;
let this = Self::default();
let provider = CompletionProvider::new(Arc::new(RwLock::new(this.clone())), None);
cx.set_global(provider);
this
}
pub fn running_completions(&self) -> Vec<LanguageModelRequest> {
self.current_completion_txs
.lock()
.keys()
.map(|k| serde_json::from_str(k).unwrap())
.collect()
}
pub fn completion_count(&self) -> usize {
self.current_completion_txs.lock().len()
}
pub fn send_completion(&self, request: &LanguageModelRequest, chunk: String) {
let json = serde_json::to_string(request).unwrap();
self.current_completion_txs
.lock()
.get(&json)
.unwrap()
.unbounded_send(chunk)
.unwrap();
}
pub fn finish_completion(&self, request: &LanguageModelRequest) {
self.current_completion_txs
.lock()
.remove(&serde_json::to_string(request).unwrap());
}
}
impl LanguageModelCompletionProvider for FakeCompletionProvider {
fn available_models(&self, _cx: &AppContext) -> Vec<LanguageModel> {
vec![LanguageModel::default()]
}
fn settings_version(&self) -> usize {
0
}
fn is_authenticated(&self) -> bool {
true
}
fn authenticate(&self, _cx: &AppContext) -> Task<Result<()>> {
Task::ready(Ok(()))
}
fn authentication_prompt(&self, _cx: &mut WindowContext) -> AnyView {
unimplemented!()
}
fn reset_credentials(&self, _cx: &AppContext) -> Task<Result<()>> {
Task::ready(Ok(()))
}
fn model(&self) -> LanguageModel {
LanguageModel::default()
}
fn count_tokens(
&self,
_request: LanguageModelRequest,
_cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
futures::future::ready(Ok(0)).boxed()
}
fn complete(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
let (tx, rx) = mpsc::unbounded();
self.current_completion_txs
.lock()
.insert(serde_json::to_string(&request).unwrap(), tx);
async move { Ok(rx.map(Ok).boxed()) }.boxed()
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
}

View File

@@ -0,0 +1,358 @@
use crate::LanguageModelCompletionProvider;
use crate::{
assistant_settings::OllamaModel, CompletionProvider, LanguageModel, LanguageModelRequest, Role,
};
use anyhow::Result;
use futures::StreamExt as _;
use futures::{future::BoxFuture, stream::BoxStream, FutureExt};
use gpui::{AnyView, AppContext, Task};
use http::HttpClient;
use ollama::{
get_models, preload_model, stream_chat_completion, ChatMessage, ChatOptions, ChatRequest,
Role as OllamaRole,
};
use std::sync::Arc;
use std::time::Duration;
use ui::{prelude::*, ButtonLike, ElevationIndex};
const OLLAMA_DOWNLOAD_URL: &str = "https://ollama.com/download";
const OLLAMA_LIBRARY_URL: &str = "https://ollama.com/library";
pub struct OllamaCompletionProvider {
api_url: String,
model: OllamaModel,
http_client: Arc<dyn HttpClient>,
low_speed_timeout: Option<Duration>,
settings_version: usize,
available_models: Vec<OllamaModel>,
}
impl LanguageModelCompletionProvider for OllamaCompletionProvider {
fn available_models(&self, _cx: &AppContext) -> Vec<LanguageModel> {
self.available_models
.iter()
.map(|m| LanguageModel::Ollama(m.clone()))
.collect()
}
fn settings_version(&self) -> usize {
self.settings_version
}
fn is_authenticated(&self) -> bool {
!self.available_models.is_empty()
}
fn authenticate(&self, cx: &AppContext) -> Task<Result<()>> {
if self.is_authenticated() {
Task::ready(Ok(()))
} else {
self.fetch_models(cx)
}
}
fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView {
let fetch_models = Box::new(move |cx: &mut WindowContext| {
cx.update_global::<CompletionProvider, _>(|provider, cx| {
provider
.update_current_as::<_, OllamaCompletionProvider>(|provider| {
provider.fetch_models(cx)
})
.unwrap_or_else(|| Task::ready(Ok(())))
})
});
cx.new_view(|cx| DownloadOllamaMessage::new(fetch_models, cx))
.into()
}
fn reset_credentials(&self, cx: &AppContext) -> Task<Result<()>> {
self.fetch_models(cx)
}
fn model(&self) -> LanguageModel {
LanguageModel::Ollama(self.model.clone())
}
fn count_tokens(
&self,
request: LanguageModelRequest,
_cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
// There is no endpoint for this _yet_ in Ollama
// see: https://github.com/ollama/ollama/issues/1716 and https://github.com/ollama/ollama/issues/3582
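// As a rough heuristic, assume ~4 characters per token: a 400-character
// conversation is estimated at ~100 tokens.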
let token_count = request
.messages
.iter()
.map(|msg| msg.content.chars().count())
.sum::<usize>()
/ 4;
async move { Ok(token_count) }.boxed()
}
fn complete(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
let request = self.to_ollama_request(request);
let http_client = self.http_client.clone();
let api_url = self.api_url.clone();
let low_speed_timeout = self.low_speed_timeout;
async move {
let request =
stream_chat_completion(http_client.as_ref(), &api_url, request, low_speed_timeout);
let response = request.await?;
let stream = response
.filter_map(|response| async move {
match response {
Ok(delta) => {
let content = match delta.message {
ChatMessage::User { content } => content,
ChatMessage::Assistant { content } => content,
ChatMessage::System { content } => content,
};
Some(Ok(content))
}
Err(error) => Some(Err(error)),
}
})
.boxed();
Ok(stream)
}
.boxed()
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
}
impl OllamaCompletionProvider {
pub fn new(
model: OllamaModel,
api_url: String,
http_client: Arc<dyn HttpClient>,
low_speed_timeout: Option<Duration>,
settings_version: usize,
cx: &AppContext,
) -> Self {
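// Preload the configured model in the background so the first completion
// doesn't pay the model-load latency; skipped while no model name is set.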
cx.spawn({
let api_url = api_url.clone();
let client = http_client.clone();
let model = model.name.clone();
|_| async move {
if model.is_empty() {
return Ok(());
}
preload_model(client.as_ref(), &api_url, &model).await
}
})
.detach_and_log_err(cx);
Self {
api_url,
model,
http_client,
low_speed_timeout,
settings_version,
available_models: Default::default(),
}
}
pub fn update(
&mut self,
model: OllamaModel,
api_url: String,
low_speed_timeout: Option<Duration>,
settings_version: usize,
cx: &AppContext,
) {
cx.spawn({
let api_url = api_url.clone();
let client = self.http_client.clone();
let model = model.name.clone();
|_| async move { preload_model(client.as_ref(), &api_url, &model).await }
})
.detach_and_log_err(cx);
if model.name.is_empty() {
self.select_first_available_model()
} else {
self.model = model;
}
self.api_url = api_url;
self.low_speed_timeout = low_speed_timeout;
self.settings_version = settings_version;
}
pub fn select_first_available_model(&mut self) {
if let Some(model) = self.available_models.first() {
self.model = model.clone();
}
}
pub fn fetch_models(&self, cx: &AppContext) -> Task<Result<()>> {
let http_client = self.http_client.clone();
let api_url = self.api_url.clone();
// As a proxy for the server being "authenticated", we'll check if it's up by fetching the models
cx.spawn(|mut cx| async move {
let models = get_models(http_client.as_ref(), &api_url, None).await?;
let mut models: Vec<OllamaModel> = models
.into_iter()
// Since there is no metadata from the Ollama API
// indicating which models are embedding models,
// simply filter out models with "-embed" in their name
.filter(|model| !model.name.contains("-embed"))
.map(|model| OllamaModel::new(&model.name))
.collect();
models.sort_by(|a, b| a.name.cmp(&b.name));
cx.update_global::<CompletionProvider, _>(|provider, _cx| {
provider.update_current_as::<_, OllamaCompletionProvider>(|provider| {
provider.available_models = models;
if !provider.available_models.is_empty() && provider.model.name.is_empty() {
provider.select_first_available_model()
}
});
})
})
}
fn to_ollama_request(&self, request: LanguageModelRequest) -> ChatRequest {
let model = match request.model {
LanguageModel::Ollama(model) => model,
_ => self.model.clone(),
};
ChatRequest {
model: model.name,
messages: request
.messages
.into_iter()
.map(|msg| match msg.role {
Role::User => ChatMessage::User {
content: msg.content,
},
Role::Assistant => ChatMessage::Assistant {
content: msg.content,
},
Role::System => ChatMessage::System {
content: msg.content,
},
})
.collect(),
keep_alive: model.keep_alive.unwrap_or_default(),
stream: true,
options: Some(ChatOptions {
num_ctx: Some(model.max_tokens),
stop: Some(request.stop),
temperature: Some(request.temperature),
..Default::default()
}),
}
}
}
impl From<Role> for ollama::Role {
fn from(val: Role) -> Self {
match val {
Role::User => OllamaRole::User,
Role::Assistant => OllamaRole::Assistant,
Role::System => OllamaRole::System,
}
}
}
struct DownloadOllamaMessage {
retry_connection: Box<dyn Fn(&mut WindowContext) -> Task<Result<()>>>,
}
impl DownloadOllamaMessage {
pub fn new(
retry_connection: Box<dyn Fn(&mut WindowContext) -> Task<Result<()>>>,
_cx: &mut ViewContext<Self>,
) -> Self {
Self { retry_connection }
}
fn render_download_button(&self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
ButtonLike::new("download_ollama_button")
.style(ButtonStyle::Filled)
.size(ButtonSize::Large)
.layer(ElevationIndex::ModalSurface)
.child(Label::new("Get Ollama"))
.on_click(move |_, cx| cx.open_url(OLLAMA_DOWNLOAD_URL))
}
fn render_retry_button(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
ButtonLike::new("retry_ollama_models")
.style(ButtonStyle::Filled)
.size(ButtonSize::Large)
.layer(ElevationIndex::ModalSurface)
.child(Label::new("Retry"))
.on_click(cx.listener(move |this, _, cx| {
let connected = (this.retry_connection)(cx);
cx.spawn(|_this, _cx| async move {
connected.await?;
anyhow::Ok(())
})
.detach_and_log_err(cx)
}))
}
fn render_next_steps(&self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
v_flex()
.p_4()
.size_full()
.gap_2()
.child(
Label::new("Once Ollama is on your machine, make sure to download a model or two.")
.size(LabelSize::Large),
)
.child(
h_flex().w_full().p_4().justify_center().gap_2().child(
ButtonLike::new("view-models")
.style(ButtonStyle::Filled)
.size(ButtonSize::Large)
.layer(ElevationIndex::ModalSurface)
.child(Label::new("View Available Models"))
.on_click(move |_, cx| cx.open_url(OLLAMA_LIBRARY_URL)),
),
)
}
}
impl Render for DownloadOllamaMessage {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
v_flex()
.p_4()
.size_full()
.gap_2()
.child(Label::new("To use Ollama models via the assistant, Ollama must be running on your machine with at least one model downloaded.").size(LabelSize::Large))
.child(
h_flex()
.w_full()
.p_4()
.justify_center()
.gap_2()
.child(
self.render_download_button(cx)
)
.child(
self.render_retry_button(cx)
)
)
.child(self.render_next_steps(cx))
.into_any()
}
}

View File

@@ -0,0 +1,378 @@
use crate::assistant_settings::CloudModel;
use crate::assistant_settings::{AssistantProvider, AssistantSettings};
use crate::LanguageModelCompletionProvider;
use crate::{
assistant_settings::OpenAiModel, CompletionProvider, LanguageModel, LanguageModelRequest, Role,
};
use anyhow::{anyhow, Result};
use editor::{Editor, EditorElement, EditorStyle};
use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
use gpui::{AnyView, AppContext, FontStyle, Task, TextStyle, View, WhiteSpace};
use http::HttpClient;
use open_ai::{stream_completion, Request, RequestMessage, Role as OpenAiRole};
use settings::Settings;
use std::time::Duration;
use std::{env, sync::Arc};
use strum::IntoEnumIterator;
use theme::ThemeSettings;
use ui::prelude::*;
use util::ResultExt;
pub struct OpenAiCompletionProvider {
api_key: Option<String>,
api_url: String,
model: OpenAiModel,
http_client: Arc<dyn HttpClient>,
low_speed_timeout: Option<Duration>,
settings_version: usize,
}
impl OpenAiCompletionProvider {
pub fn new(
model: OpenAiModel,
api_url: String,
http_client: Arc<dyn HttpClient>,
low_speed_timeout: Option<Duration>,
settings_version: usize,
) -> Self {
Self {
api_key: None,
api_url,
model,
http_client,
low_speed_timeout,
settings_version,
}
}
pub fn update(
&mut self,
model: OpenAiModel,
api_url: String,
low_speed_timeout: Option<Duration>,
settings_version: usize,
) {
self.model = model;
self.api_url = api_url;
self.low_speed_timeout = low_speed_timeout;
self.settings_version = settings_version;
}
fn to_open_ai_request(&self, request: LanguageModelRequest) -> Request {
let model = match request.model {
LanguageModel::OpenAi(model) => model,
_ => self.model.clone(),
};
Request {
model,
messages: request
.messages
.into_iter()
.map(|msg| match msg.role {
Role::User => RequestMessage::User {
content: msg.content,
},
Role::Assistant => RequestMessage::Assistant {
content: Some(msg.content),
tool_calls: Vec::new(),
},
Role::System => RequestMessage::System {
content: msg.content,
},
})
.collect(),
stream: true,
stop: request.stop,
temperature: request.temperature,
tools: Vec::new(),
tool_choice: None,
}
}
}
impl LanguageModelCompletionProvider for OpenAiCompletionProvider {
fn available_models(&self, cx: &AppContext) -> Vec<LanguageModel> {
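// Prefer the models listed in settings; otherwise fall back to the built-in
// variants, or to just the current model when it is a custom one.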
if let AssistantProvider::OpenAi {
available_models, ..
} = &AssistantSettings::get_global(cx).provider
{
if !available_models.is_empty() {
return available_models
.iter()
.cloned()
.map(LanguageModel::OpenAi)
.collect();
}
}
let available_models = if matches!(self.model, OpenAiModel::Custom { .. }) {
vec![self.model.clone()]
} else {
OpenAiModel::iter()
.filter(|model| !matches!(model, OpenAiModel::Custom { .. }))
.collect()
};
available_models
.into_iter()
.map(LanguageModel::OpenAi)
.collect()
}
fn settings_version(&self) -> usize {
self.settings_version
}
fn is_authenticated(&self) -> bool {
self.api_key.is_some()
}
fn authenticate(&self, cx: &AppContext) -> Task<Result<()>> {
if self.is_authenticated() {
Task::ready(Ok(()))
} else {
let api_url = self.api_url.clone();
cx.spawn(|mut cx| async move {
let api_key = if let Ok(api_key) = env::var("OPENAI_API_KEY") {
api_key
} else {
let (_, api_key) = cx
.update(|cx| cx.read_credentials(&api_url))?
.await?
.ok_or_else(|| anyhow!("credentials not found"))?;
String::from_utf8(api_key)?
};
cx.update_global::<CompletionProvider, _>(|provider, _cx| {
provider.update_current_as::<_, Self>(|provider| {
provider.api_key = Some(api_key);
});
})
})
}
}
fn reset_credentials(&self, cx: &AppContext) -> Task<Result<()>> {
let delete_credentials = cx.delete_credentials(&self.api_url);
cx.spawn(|mut cx| async move {
delete_credentials.await.log_err();
cx.update_global::<CompletionProvider, _>(|provider, _cx| {
provider.update_current_as::<_, Self>(|provider| {
provider.api_key = None;
});
})
})
}
fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView {
cx.new_view(|cx| AuthenticationPrompt::new(self.api_url.clone(), cx))
.into()
}
fn model(&self) -> LanguageModel {
LanguageModel::OpenAi(self.model.clone())
}
fn count_tokens(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
count_open_ai_tokens(request, cx.background_executor())
}
fn complete(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
let request = self.to_open_ai_request(request);
let http_client = self.http_client.clone();
let api_key = self.api_key.clone();
let api_url = self.api_url.clone();
let low_speed_timeout = self.low_speed_timeout;
async move {
let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?;
let request = stream_completion(
http_client.as_ref(),
&api_url,
&api_key,
request,
low_speed_timeout,
);
let response = request.await?;
let stream = response
.filter_map(|response| async move {
match response {
Ok(mut response) => Some(Ok(response.choices.pop()?.delta.content?)),
Err(error) => Some(Err(error)),
}
})
.boxed();
Ok(stream)
}
.boxed()
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
}
pub fn count_open_ai_tokens(
request: LanguageModelRequest,
background_executor: &gpui::BackgroundExecutor,
) -> BoxFuture<'static, Result<usize>> {
background_executor
.spawn(async move {
let messages = request
.messages
.into_iter()
.map(|message| tiktoken_rs::ChatCompletionRequestMessage {
role: match message.role {
Role::User => "user".into(),
Role::Assistant => "assistant".into(),
Role::System => "system".into(),
},
content: Some(message.content),
name: None,
function_call: None,
})
.collect::<Vec<_>>();
match request.model {
LanguageModel::Anthropic(_)
| LanguageModel::Cloud(CloudModel::Claude3_5Sonnet)
| LanguageModel::Cloud(CloudModel::Claude3Opus)
| LanguageModel::Cloud(CloudModel::Claude3Sonnet)
| LanguageModel::Cloud(CloudModel::Claude3Haiku)
| LanguageModel::OpenAi(OpenAiModel::Custom { .. }) => {
// Tiktoken doesn't yet support these models, so we manually use the
// same tokenizer as GPT-4.
tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
}
_ => tiktoken_rs::num_tokens_from_messages(request.model.id(), &messages),
}
})
.boxed()
}
impl From<Role> for open_ai::Role {
fn from(val: Role) -> Self {
match val {
Role::User => OpenAiRole::User,
Role::Assistant => OpenAiRole::Assistant,
Role::System => OpenAiRole::System,
}
}
}
struct AuthenticationPrompt {
api_key: View<Editor>,
api_url: String,
}
impl AuthenticationPrompt {
fn new(api_url: String, cx: &mut WindowContext) -> Self {
Self {
api_key: cx.new_view(|cx| {
let mut editor = Editor::single_line(cx);
editor.set_placeholder_text(
"sk-000000000000000000000000000000000000000000000000",
cx,
);
editor
}),
api_url,
}
}
fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
let api_key = self.api_key.read(cx).text(cx);
if api_key.is_empty() {
return;
}
let write_credentials = cx.write_credentials(&self.api_url, "Bearer", api_key.as_bytes());
cx.spawn(|_, mut cx| async move {
write_credentials.await?;
cx.update_global::<CompletionProvider, _>(|provider, _cx| {
provider.update_current_as::<_, OpenAiCompletionProvider>(|provider| {
provider.api_key = Some(api_key);
});
})
})
.detach_and_log_err(cx);
}
fn render_api_key_editor(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
let settings = ThemeSettings::get_global(cx);
let text_style = TextStyle {
color: cx.theme().colors().text,
font_family: settings.ui_font.family.clone(),
font_features: settings.ui_font.features.clone(),
font_size: rems(0.875).into(),
font_weight: settings.ui_font.weight,
font_style: FontStyle::Normal,
line_height: relative(1.3),
background_color: None,
underline: None,
strikethrough: None,
white_space: WhiteSpace::Normal,
};
EditorElement::new(
&self.api_key,
EditorStyle {
background: cx.theme().colors().editor_background,
local_player: cx.theme().players().local(),
text: text_style,
..Default::default()
},
)
}
}
impl Render for AuthenticationPrompt {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
const INSTRUCTIONS: [&str; 6] = [
"To use the assistant panel or inline assistant, you need to add your OpenAI API key.",
" - You can create an API key at: platform.openai.com/api-keys",
" - Make sure your OpenAI account has credits",
" - Having a subscription for another service like GitHub Copilot won't work.",
"",
"Paste your OpenAI API key below and hit enter to use the assistant:",
];
v_flex()
.p_4()
.size_full()
.on_action(cx.listener(Self::save_api_key))
.children(
INSTRUCTIONS.map(|instruction| Label::new(instruction).size(LabelSize::Small)),
)
.child(
h_flex()
.w_full()
.my_2()
.px_2()
.py_1()
.bg(cx.theme().colors().editor_background)
.rounded_md()
.child(self.render_api_key_editor(cx)),
)
.child(
Label::new(
"You can also assign the OPENAI_API_KEY environment variable and restart Zed.",
)
.size(LabelSize::Small),
)
.child(
h_flex()
.gap_2()
.child(Label::new("Click on").size(LabelSize::Small))
.child(Icon::new(IconName::ZedAssistant).size(IconSize::XSmall))
.child(
Label::new("in the status bar to close this panel.").size(LabelSize::Small),
),
)
.into_any()
}
}

File diff suppressed because it is too large

View File

@@ -1,132 +1,97 @@
use crate::{
prompts::PromptBuilder, Context, ContextEvent, ContextId, ContextOperation, ContextVersion,
SavedContext, SavedContextMetadata,
};
use anyhow::{anyhow, Context as _, Result};
use client::{proto, telemetry::Telemetry, Client, TypedEnvelope};
use clock::ReplicaId;
use crate::{assistant_settings::OpenAiModel, MessageId, MessageMetadata};
use anyhow::{anyhow, Result};
use assistant_slash_command::SlashCommandOutputSection;
use collections::HashMap;
use fs::Fs;
use futures::StreamExt;
use fuzzy::StringMatchCandidate;
use gpui::{
AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, Task, WeakModel,
};
use language::LanguageRegistry;
use gpui::{AppContext, Model, ModelContext, Task};
use paths::contexts_dir;
use project::Project;
use regex::Regex;
use std::{
cmp::Reverse,
ffi::OsStr,
mem,
path::{Path, PathBuf},
sync::Arc,
time::Duration,
};
use serde::{Deserialize, Serialize};
use std::{cmp::Reverse, ffi::OsStr, path::PathBuf, sync::Arc, time::Duration};
use ui::Context;
use util::{ResultExt, TryFutureExt};
pub fn init(client: &Arc<Client>) {
client.add_model_message_handler(ContextStore::handle_advertise_contexts);
client.add_model_request_handler(ContextStore::handle_open_context);
client.add_model_request_handler(ContextStore::handle_create_context);
client.add_model_message_handler(ContextStore::handle_update_context);
client.add_model_request_handler(ContextStore::handle_synchronize_contexts);
#[derive(Serialize, Deserialize)]
pub struct SavedMessage {
pub id: MessageId,
pub start: usize,
}
#[derive(Serialize, Deserialize)]
pub struct SavedContext {
pub id: Option<String>,
pub zed: String,
pub version: String,
pub text: String,
pub messages: Vec<SavedMessage>,
pub message_metadata: HashMap<MessageId, MessageMetadata>,
pub summary: String,
pub slash_command_output_sections: Vec<SlashCommandOutputSection<usize>>,
}
impl SavedContext {
pub const VERSION: &'static str = "0.3.0";
}
#[derive(Serialize, Deserialize)]
pub struct SavedContextV0_2_0 {
pub id: Option<String>,
pub zed: String,
pub version: String,
pub text: String,
pub messages: Vec<SavedMessage>,
pub message_metadata: HashMap<MessageId, MessageMetadata>,
pub summary: String,
}
#[derive(Serialize, Deserialize)]
struct SavedContextV0_1_0 {
id: Option<String>,
zed: String,
version: String,
text: String,
messages: Vec<SavedMessage>,
message_metadata: HashMap<MessageId, MessageMetadata>,
summary: String,
api_url: Option<String>,
model: OpenAiModel,
}
#[derive(Clone)]
pub struct RemoteContextMetadata {
pub id: ContextId,
pub summary: Option<String>,
pub struct SavedContextMetadata {
pub title: String,
pub path: PathBuf,
pub mtime: chrono::DateTime<chrono::Local>,
}
pub struct ContextStore {
contexts: Vec<ContextHandle>,
contexts_metadata: Vec<SavedContextMetadata>,
host_contexts: Vec<RemoteContextMetadata>,
fs: Arc<dyn Fs>,
languages: Arc<LanguageRegistry>,
telemetry: Arc<Telemetry>,
_watch_updates: Task<Option<()>>,
client: Arc<Client>,
project: Model<Project>,
project_is_shared: bool,
client_subscription: Option<client::Subscription>,
_project_subscriptions: Vec<gpui::Subscription>,
prompt_builder: Arc<PromptBuilder>,
}
pub enum ContextStoreEvent {
ContextCreated(ContextId),
}
impl EventEmitter<ContextStoreEvent> for ContextStore {}
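// Contexts are held strongly while the project is shared, so they stay alive
// for collaborators, and weakly otherwise, letting closed contexts be dropped.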
enum ContextHandle {
Weak(WeakModel<Context>),
Strong(Model<Context>),
}
impl ContextHandle {
fn upgrade(&self) -> Option<Model<Context>> {
match self {
ContextHandle::Weak(weak) => weak.upgrade(),
ContextHandle::Strong(strong) => Some(strong.clone()),
}
}
fn downgrade(&self) -> WeakModel<Context> {
match self {
ContextHandle::Weak(weak) => weak.clone(),
ContextHandle::Strong(strong) => strong.downgrade(),
}
}
}
impl ContextStore {
pub fn new(
project: Model<Project>,
prompt_builder: Arc<PromptBuilder>,
cx: &mut AppContext,
) -> Task<Result<Model<Self>>> {
let fs = project.read(cx).fs().clone();
let languages = project.read(cx).languages().clone();
let telemetry = project.read(cx).client().telemetry().clone();
pub fn new(fs: Arc<dyn Fs>, cx: &mut AppContext) -> Task<Result<Model<Self>>> {
cx.spawn(|mut cx| async move {
const CONTEXT_WATCH_DURATION: Duration = Duration::from_millis(100);
let (mut events, _) = fs.watch(contexts_dir(), CONTEXT_WATCH_DURATION).await;
let this = cx.new_model(|cx: &mut ModelContext<Self>| {
let mut this = Self {
contexts: Vec::new(),
contexts_metadata: Vec::new(),
host_contexts: Vec::new(),
fs,
languages,
telemetry,
_watch_updates: cx.spawn(|this, mut cx| {
async move {
while events.next().await.is_some() {
this.update(&mut cx, |this, cx| this.reload(cx))?
.await
.log_err();
}
anyhow::Ok(())
let this = cx.new_model(|cx: &mut ModelContext<Self>| Self {
contexts_metadata: Vec::new(),
fs,
_watch_updates: cx.spawn(|this, mut cx| {
async move {
while events.next().await.is_some() {
this.update(&mut cx, |this, cx| this.reload(cx))?
.await
.log_err();
}
.log_err()
}),
client_subscription: None,
_project_subscriptions: vec![
cx.observe(&project, Self::handle_project_changed),
cx.subscribe(&project, Self::handle_project_event),
],
project_is_shared: false,
client: project.read(cx).client(),
project: project.clone(),
prompt_builder,
};
this.handle_project_changed(project, cx);
this.synchronize_contexts(cx);
this
anyhow::Ok(())
}
.log_err()
}),
})?;
this.update(&mut cx, |this, cx| this.reload(cx))?
.await
@@ -135,541 +100,52 @@ impl ContextStore {
})
}
async fn handle_advertise_contexts(
this: Model<Self>,
envelope: TypedEnvelope<proto::AdvertiseContexts>,
mut cx: AsyncAppContext,
) -> Result<()> {
this.update(&mut cx, |this, cx| {
this.host_contexts = envelope
.payload
.contexts
.into_iter()
.map(|context| RemoteContextMetadata {
id: ContextId::from_proto(context.context_id),
summary: context.summary,
})
.collect();
cx.notify();
})
}
async fn handle_open_context(
this: Model<Self>,
envelope: TypedEnvelope<proto::OpenContext>,
mut cx: AsyncAppContext,
) -> Result<proto::OpenContextResponse> {
let context_id = ContextId::from_proto(envelope.payload.context_id);
let operations = this.update(&mut cx, |this, cx| {
if this.project.read(cx).is_remote() {
return Err(anyhow!("only the host contexts can be opened"));
}
let context = this
.loaded_context_for_id(&context_id, cx)
.context("context not found")?;
if context.read(cx).replica_id() != ReplicaId::default() {
return Err(anyhow!("context must be opened via the host"));
}
anyhow::Ok(
context
.read(cx)
.serialize_ops(&ContextVersion::default(), cx),
)
})??;
let operations = operations.await;
Ok(proto::OpenContextResponse {
context: Some(proto::Context { operations }),
})
}
async fn handle_create_context(
this: Model<Self>,
_: TypedEnvelope<proto::CreateContext>,
mut cx: AsyncAppContext,
) -> Result<proto::CreateContextResponse> {
let (context_id, operations) = this.update(&mut cx, |this, cx| {
if this.project.read(cx).is_remote() {
return Err(anyhow!("can only create contexts as the host"));
}
let context = this.create(cx);
let context_id = context.read(cx).id().clone();
cx.emit(ContextStoreEvent::ContextCreated(context_id.clone()));
anyhow::Ok((
context_id,
context
.read(cx)
.serialize_ops(&ContextVersion::default(), cx),
))
})??;
let operations = operations.await;
Ok(proto::CreateContextResponse {
context_id: context_id.to_proto(),
context: Some(proto::Context { operations }),
})
}
async fn handle_update_context(
this: Model<Self>,
envelope: TypedEnvelope<proto::UpdateContext>,
mut cx: AsyncAppContext,
) -> Result<()> {
this.update(&mut cx, |this, cx| {
let context_id = ContextId::from_proto(envelope.payload.context_id);
if let Some(context) = this.loaded_context_for_id(&context_id, cx) {
let operation_proto = envelope.payload.operation.context("invalid operation")?;
let operation = ContextOperation::from_proto(operation_proto)?;
context.update(cx, |context, cx| context.apply_ops([operation], cx))?;
}
Ok(())
})?
}
async fn handle_synchronize_contexts(
this: Model<Self>,
envelope: TypedEnvelope<proto::SynchronizeContexts>,
mut cx: AsyncAppContext,
) -> Result<proto::SynchronizeContextsResponse> {
this.update(&mut cx, |this, cx| {
if this.project.read(cx).is_remote() {
return Err(anyhow!("only the host can synchronize contexts"));
}
let mut local_versions = Vec::new();
for remote_version_proto in envelope.payload.contexts {
let remote_version = ContextVersion::from_proto(&remote_version_proto);
let context_id = ContextId::from_proto(remote_version_proto.context_id);
if let Some(context) = this.loaded_context_for_id(&context_id, cx) {
let context = context.read(cx);
let operations = context.serialize_ops(&remote_version, cx);
local_versions.push(context.version(cx).to_proto(context_id.clone()));
let client = this.client.clone();
let project_id = envelope.payload.project_id;
cx.background_executor()
.spawn(async move {
let operations = operations.await;
for operation in operations {
client.send(proto::UpdateContext {
project_id,
context_id: context_id.to_proto(),
operation: Some(operation),
})?;
}
anyhow::Ok(())
})
.detach_and_log_err(cx);
}
}
this.advertise_contexts(cx);
anyhow::Ok(proto::SynchronizeContextsResponse {
contexts: local_versions,
})
})?
}
fn handle_project_changed(&mut self, _: Model<Project>, cx: &mut ModelContext<Self>) {
let is_shared = self.project.read(cx).is_shared();
let was_shared = mem::replace(&mut self.project_is_shared, is_shared);
if is_shared == was_shared {
return;
}
if is_shared {
self.contexts.retain_mut(|context| {
if let Some(strong_context) = context.upgrade() {
*context = ContextHandle::Strong(strong_context);
true
} else {
false
}
});
let remote_id = self.project.read(cx).remote_id().unwrap();
self.client_subscription = self
.client
.subscribe_to_entity(remote_id)
.log_err()
.map(|subscription| subscription.set_model(&cx.handle(), &mut cx.to_async()));
self.advertise_contexts(cx);
} else {
self.client_subscription = None;
}
}
fn handle_project_event(
&mut self,
_: Model<Project>,
event: &project::Event,
cx: &mut ModelContext<Self>,
) {
match event {
project::Event::Reshared => {
self.advertise_contexts(cx);
}
project::Event::HostReshared | project::Event::Rejoined => {
self.synchronize_contexts(cx);
}
project::Event::DisconnectedFromHost => {
self.contexts.retain_mut(|context| {
if let Some(strong_context) = context.upgrade() {
*context = ContextHandle::Weak(context.downgrade());
strong_context.update(cx, |context, cx| {
if context.replica_id() != ReplicaId::default() {
context.set_capability(language::Capability::ReadOnly, cx);
}
});
true
} else {
false
}
});
self.host_contexts.clear();
cx.notify();
}
_ => {}
}
}
pub fn create(&mut self, cx: &mut ModelContext<Self>) -> Model<Context> {
let context = cx.new_model(|cx| {
Context::local(
self.languages.clone(),
Some(self.project.clone()),
Some(self.telemetry.clone()),
self.prompt_builder.clone(),
cx,
)
});
self.register_context(&context, cx);
context
}
pub fn create_remote_context(
&mut self,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Context>>> {
let project = self.project.read(cx);
let Some(project_id) = project.remote_id() else {
return Task::ready(Err(anyhow!("project was not remote")));
};
if project.is_local() {
return Task::ready(Err(anyhow!("cannot create remote contexts as the host")));
}
let replica_id = project.replica_id();
let capability = project.capability();
let language_registry = self.languages.clone();
let project = self.project.clone();
let telemetry = self.telemetry.clone();
let prompt_builder = self.prompt_builder.clone();
let request = self.client.request(proto::CreateContext { project_id });
cx.spawn(|this, mut cx| async move {
let response = request.await?;
let context_id = ContextId::from_proto(response.context_id);
let context_proto = response.context.context("invalid context")?;
let context = cx.new_model(|cx| {
Context::new(
context_id.clone(),
replica_id,
capability,
language_registry,
prompt_builder,
Some(project),
Some(telemetry),
cx,
)
})?;
let operations = cx
.background_executor()
.spawn(async move {
context_proto
.operations
.into_iter()
.map(|op| ContextOperation::from_proto(op))
.collect::<Result<Vec<_>>>()
})
.await?;
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??;
this.update(&mut cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
existing_context
} else {
this.register_context(&context, cx);
this.synchronize_contexts(cx);
context
}
})
})
}
pub fn open_local_context(
&mut self,
path: PathBuf,
cx: &ModelContext<Self>,
) -> Task<Result<Model<Context>>> {
if let Some(existing_context) = self.loaded_context_for_path(&path, cx) {
return Task::ready(Ok(existing_context));
}
pub fn load(&self, path: PathBuf, cx: &AppContext) -> Task<Result<SavedContext>> {
let fs = self.fs.clone();
let languages = self.languages.clone();
let project = self.project.clone();
let telemetry = self.telemetry.clone();
let load = cx.background_executor().spawn({
let path = path.clone();
async move {
let saved_context = fs.load(&path).await?;
SavedContext::from_json(&saved_context)
}
});
let prompt_builder = self.prompt_builder.clone();
cx.spawn(|this, mut cx| async move {
let saved_context = load.await?;
let context = cx.new_model(|cx| {
Context::deserialize(
saved_context,
path.clone(),
languages,
prompt_builder,
Some(project),
Some(telemetry),
cx,
)
})?;
this.update(&mut cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_path(&path, cx) {
existing_context
} else {
this.register_context(&context, cx);
context
}
})
})
}
fn loaded_context_for_path(&self, path: &Path, cx: &AppContext) -> Option<Model<Context>> {
self.contexts.iter().find_map(|context| {
let context = context.upgrade()?;
if context.read(cx).path() == Some(path) {
Some(context)
} else {
None
}
})
}
pub(super) fn loaded_context_for_id(
&self,
id: &ContextId,
cx: &AppContext,
) -> Option<Model<Context>> {
self.contexts.iter().find_map(|context| {
let context = context.upgrade()?;
if context.read(cx).id() == id {
Some(context)
} else {
None
}
})
}
pub fn open_remote_context(
&mut self,
context_id: ContextId,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Context>>> {
let project = self.project.read(cx);
let Some(project_id) = project.remote_id() else {
return Task::ready(Err(anyhow!("project was not remote")));
};
if project.is_local() {
return Task::ready(Err(anyhow!("cannot open remote contexts as the host")));
}
if let Some(context) = self.loaded_context_for_id(&context_id, cx) {
return Task::ready(Ok(context));
}
let replica_id = project.replica_id();
let capability = project.capability();
let language_registry = self.languages.clone();
let project = self.project.clone();
let telemetry = self.telemetry.clone();
let request = self.client.request(proto::OpenContext {
project_id,
context_id: context_id.to_proto(),
});
let prompt_builder = self.prompt_builder.clone();
cx.spawn(|this, mut cx| async move {
let response = request.await?;
let context_proto = response.context.context("invalid context")?;
let context = cx.new_model(|cx| {
Context::new(
context_id.clone(),
replica_id,
capability,
language_registry,
prompt_builder,
Some(project),
Some(telemetry),
cx,
)
})?;
let operations = cx
.background_executor()
.spawn(async move {
context_proto
.operations
.into_iter()
.map(|op| ContextOperation::from_proto(op))
.collect::<Result<Vec<_>>>()
})
.await?;
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??;
this.update(&mut cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
existing_context
} else {
this.register_context(&context, cx);
this.synchronize_contexts(cx);
context
}
})
})
}
fn register_context(&mut self, context: &Model<Context>, cx: &mut ModelContext<Self>) {
let handle = if self.project_is_shared {
ContextHandle::Strong(context.clone())
} else {
ContextHandle::Weak(context.downgrade())
};
self.contexts.push(handle);
self.advertise_contexts(cx);
cx.subscribe(context, Self::handle_context_event).detach();
}
fn handle_context_event(
&mut self,
context: Model<Context>,
event: &ContextEvent,
cx: &mut ModelContext<Self>,
) {
let Some(project_id) = self.project.read(cx).remote_id() else {
return;
};
match event {
ContextEvent::SummaryChanged => {
self.advertise_contexts(cx);
}
ContextEvent::Operation(operation) => {
let context_id = context.read(cx).id().to_proto();
let operation = operation.to_proto();
self.client
.send(proto::UpdateContext {
project_id,
context_id,
operation: Some(operation),
})
.log_err();
}
_ => {}
}
}
fn advertise_contexts(&self, cx: &AppContext) {
let Some(project_id) = self.project.read(cx).remote_id() else {
return;
};
// For now, only the host can advertise their open contexts.
if self.project.read(cx).is_remote() {
return;
}
let contexts = self
.contexts
.iter()
.rev()
.filter_map(|context| {
let context = context.upgrade()?.read(cx);
if context.replica_id() == ReplicaId::default() {
Some(proto::ContextMetadata {
context_id: context.id().to_proto(),
summary: context.summary().map(|summary| summary.text.clone()),
})
} else {
None
}
})
.collect();
self.client
.send(proto::AdvertiseContexts {
project_id,
contexts,
})
.ok();
}
fn synchronize_contexts(&mut self, cx: &mut ModelContext<Self>) {
let Some(project_id) = self.project.read(cx).remote_id() else {
return;
};
let contexts = self
.contexts
.iter()
.filter_map(|context| {
let context = context.upgrade()?.read(cx);
if context.replica_id() != ReplicaId::default() {
Some(context.version(cx).to_proto(context.id().clone()))
} else {
None
}
})
.collect();
let client = self.client.clone();
let request = self.client.request(proto::SynchronizeContexts {
project_id,
contexts,
});
cx.spawn(|this, cx| async move {
let response = request.await?;
let mut context_ids = Vec::new();
let mut operations = Vec::new();
this.read_with(&cx, |this, cx| {
for context_version_proto in response.contexts {
let context_version = ContextVersion::from_proto(&context_version_proto);
let context_id = ContextId::from_proto(context_version_proto.context_id);
if let Some(context) = this.loaded_context_for_id(&context_id, cx) {
context_ids.push(context_id);
operations.push(context.read(cx).serialize_ops(&context_version, cx));
cx.background_executor().spawn(async move {
let saved_context = fs.load(&path).await?;
let saved_context_json = serde_json::from_str::<serde_json::Value>(&saved_context)?;
match saved_context_json
.get("version")
.ok_or_else(|| anyhow!("version not found"))?
{
serde_json::Value::String(version) => match version.as_str() {
SavedContext::VERSION => {
Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
}
}
})?;
let operations = futures::future::join_all(operations).await;
for (context_id, operations) in context_ids.into_iter().zip(operations) {
for operation in operations {
client.send(proto::UpdateContext {
project_id,
context_id: context_id.to_proto(),
operation: Some(operation),
})?;
}
"0.2.0" => {
let saved_context =
serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
Ok(SavedContext {
id: saved_context.id,
zed: saved_context.zed,
version: saved_context.version,
text: saved_context.text,
messages: saved_context.messages,
message_metadata: saved_context.message_metadata,
summary: saved_context.summary,
slash_command_output_sections: Vec::new(),
})
}
"0.1.0" => {
let saved_context =
serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
Ok(SavedContext {
id: saved_context.id,
zed: saved_context.zed,
version: saved_context.version,
text: saved_context.text,
messages: saved_context.messages,
message_metadata: saved_context.message_metadata,
summary: saved_context.summary,
slash_command_output_sections: Vec::new(),
})
}
_ => Err(anyhow!("unrecognized saved context version: {}", version)),
},
_ => Err(anyhow!("version not found on saved context")),
}
anyhow::Ok(())
})
.detach_and_log_err(cx);
}
pub fn search(&self, query: String, cx: &AppContext) -> Task<Vec<SavedContextMetadata>> {
@@ -702,10 +178,6 @@ impl ContextStore {
})
}
pub fn host_contexts(&self) -> &[RemoteContextMetadata] {
&self.host_contexts
}
fn reload(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
let fs = self.fs.clone();
cx.spawn(|this, mut cx| async move {


@@ -1,299 +1,82 @@
use feature_flags::ZedPro;
use gpui::DismissEvent;
use language_model::{LanguageModel, LanguageModelAvailability, LanguageModelRegistry};
use proto::Plan;
use std::sync::Arc;
use ui::ListItemSpacing;
use crate::assistant_settings::AssistantSettings;
use crate::ShowConfiguration;
use crate::{assistant_settings::AssistantSettings, CompletionProvider, ToggleModelSelector};
use fs::Fs;
use gpui::Action;
use gpui::SharedString;
use gpui::Task;
use picker::{Picker, PickerDelegate};
use settings::update_settings_file;
use ui::{prelude::*, ListItem, PopoverMenu, PopoverMenuHandle, PopoverTrigger};
const TRY_ZED_PRO_URL: &str = "https://zed.dev/pro";
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip};
#[derive(IntoElement)]
pub struct ModelSelector<T: PopoverTrigger> {
handle: Option<PopoverMenuHandle<Picker<ModelPickerDelegate>>>,
pub struct ModelSelector {
handle: PopoverMenuHandle<ContextMenu>,
fs: Arc<dyn Fs>,
trigger: T,
info_text: Option<SharedString>,
}
pub struct ModelPickerDelegate {
fs: Arc<dyn Fs>,
all_models: Vec<ModelInfo>,
filtered_models: Vec<ModelInfo>,
selected_index: usize,
}
#[derive(Clone)]
struct ModelInfo {
model: Arc<dyn LanguageModel>,
provider_icon: IconName,
availability: LanguageModelAvailability,
is_selected: bool,
}
impl<T: PopoverTrigger> ModelSelector<T> {
pub fn new(fs: Arc<dyn Fs>, trigger: T) -> Self {
ModelSelector {
handle: None,
fs,
trigger,
info_text: None,
}
}
pub fn with_handle(mut self, handle: PopoverMenuHandle<Picker<ModelPickerDelegate>>) -> Self {
self.handle = Some(handle);
self
}
pub fn with_info_text(mut self, text: impl Into<SharedString>) -> Self {
self.info_text = Some(text.into());
self
impl ModelSelector {
pub fn new(handle: PopoverMenuHandle<ContextMenu>, fs: Arc<dyn Fs>) -> Self {
ModelSelector { handle, fs }
}
}
impl PickerDelegate for ModelPickerDelegate {
type ListItem = ListItem;
fn match_count(&self) -> usize {
self.filtered_models.len()
}
fn selected_index(&self) -> usize {
self.selected_index
}
fn set_selected_index(&mut self, ix: usize, cx: &mut ViewContext<Picker<Self>>) {
self.selected_index = ix.min(self.filtered_models.len().saturating_sub(1));
cx.notify();
}
fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc<str> {
"Select a model...".into()
}
fn update_matches(&mut self, query: String, cx: &mut ViewContext<Picker<Self>>) -> Task<()> {
let all_models = self.all_models.clone();
cx.spawn(|this, mut cx| async move {
let filtered_models = cx
.background_executor()
.spawn(async move {
if query.is_empty() {
all_models
} else {
all_models
.into_iter()
.filter(|model_info| {
model_info
.model
.name()
.0
.to_lowercase()
.contains(&query.to_lowercase())
})
.collect()
}
})
.await;
this.update(&mut cx, |this, cx| {
this.delegate.filtered_models = filtered_models;
this.delegate.set_selected_index(0, cx);
cx.notify();
})
.ok();
})
}
fn confirm(&mut self, _secondary: bool, cx: &mut ViewContext<Picker<Self>>) {
if let Some(model_info) = self.filtered_models.get(self.selected_index) {
let model = model_info.model.clone();
update_settings_file::<AssistantSettings>(self.fs.clone(), cx, move |settings, _| {
settings.set_model(model.clone())
});
// Update the selection status
let selected_model_id = model_info.model.id();
let selected_provider_id = model_info.model.provider_id();
for model in &mut self.all_models {
model.is_selected = model.model.id() == selected_model_id
&& model.model.provider_id() == selected_provider_id;
}
for model in &mut self.filtered_models {
model.is_selected = model.model.id() == selected_model_id
&& model.model.provider_id() == selected_provider_id;
}
cx.emit(DismissEvent);
}
}
fn dismissed(&mut self, _cx: &mut ViewContext<Picker<Self>>) {}
fn render_match(
&self,
ix: usize,
selected: bool,
cx: &mut ViewContext<Picker<Self>>,
) -> Option<Self::ListItem> {
use feature_flags::FeatureFlagAppExt;
let model_info = self.filtered_models.get(ix)?;
let show_badges = cx.has_flag::<ZedPro>();
Some(
ListItem::new(ix)
.inset(true)
.spacing(ListItemSpacing::Sparse)
.selected(selected)
.start_slot(
div().pr_1().child(
Icon::new(model_info.provider_icon)
.color(Color::Muted)
.size(IconSize::XSmall),
),
)
.child(
h_flex()
.w_full()
.justify_between()
.font_buffer(cx)
.min_w(px(200.))
.child(
h_flex()
.gap_2()
.child(Label::new(model_info.model.name().0.clone()))
.children(match model_info.availability {
LanguageModelAvailability::Public => None,
LanguageModelAvailability::RequiresPlan(Plan::Free) => None,
LanguageModelAvailability::RequiresPlan(Plan::ZedPro) => {
show_badges.then(|| {
Label::new("Pro")
.size(LabelSize::XSmall)
.color(Color::Muted)
})
}
}),
)
.child(div().when(model_info.is_selected, |this| {
this.child(
Icon::new(IconName::Check)
.color(Color::Accent)
.size(IconSize::Small),
)
})),
),
)
}
fn render_footer(&self, cx: &mut ViewContext<Picker<Self>>) -> Option<gpui::AnyElement> {
use feature_flags::FeatureFlagAppExt;
let plan = proto::Plan::ZedPro;
let is_trial = false;
Some(
h_flex()
.w_full()
.border_t_1()
.border_color(cx.theme().colors().border)
.p_1()
.gap_4()
.justify_between()
.when(cx.has_flag::<ZedPro>(), |this| {
this.child(match plan {
// Already a zed pro subscriber
Plan::ZedPro => Button::new("zed-pro", "Zed Pro")
.icon(IconName::ZedAssistant)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(|_, cx| {
cx.dispatch_action(Box::new(zed_actions::OpenAccountSettings))
}),
// Free user
Plan::Free => Button::new(
"try-pro",
if is_trial {
"Upgrade to Pro"
} else {
"Try Pro"
},
)
.on_click(|_, cx| cx.open_url(TRY_ZED_PRO_URL)),
})
})
.child(
Button::new("configure", "Configure")
.icon(IconName::Settings)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.on_click(|_, cx| {
cx.dispatch_action(ShowConfiguration.boxed_clone());
}),
)
.into_any(),
)
}
}
impl<T: PopoverTrigger> RenderOnce for ModelSelector<T> {
impl RenderOnce for ModelSelector {
fn render(self, cx: &mut WindowContext) -> impl IntoElement {
let selected_provider = LanguageModelRegistry::read_global(cx)
.active_provider()
.map(|m| m.id());
let selected_model = LanguageModelRegistry::read_global(cx)
.active_model()
.map(|m| m.id());
let all_models = LanguageModelRegistry::global(cx)
.read(cx)
.providers()
.iter()
.flat_map(|provider| {
let provider_id = provider.id();
let provider_icon = provider.icon();
let selected_model = selected_model.clone();
let selected_provider = selected_provider.clone();
provider.provided_models(cx).into_iter().map(move |model| {
let model = model.clone();
ModelInfo {
model: model.clone(),
provider_icon,
availability: model.availability(),
is_selected: selected_model.as_ref() == Some(&model.id())
&& selected_provider.as_ref() == Some(&provider_id),
}
})
})
.collect::<Vec<_>>();
let delegate = ModelPickerDelegate {
fs: self.fs.clone(),
all_models: all_models.clone(),
filtered_models: all_models,
selected_index: 0,
};
let picker_view = cx.new_view(|cx| {
let picker = Picker::uniform_list(delegate, cx).max_height(Some(rems(20.).into()));
picker
});
PopoverMenu::new("model-switcher")
.menu(move |_cx| Some(picker_view.clone()))
.trigger(self.trigger)
.with_handle(self.handle)
.menu(move |cx| {
ContextMenu::build(cx, |mut menu, cx| {
for model in CompletionProvider::global(cx).available_models(cx) {
menu = menu.custom_entry(
{
let model = model.clone();
move |_| Label::new(model.display_name()).into_any_element()
},
{
let fs = self.fs.clone();
let model = model.clone();
move |cx| {
let model = model.clone();
update_settings_file::<AssistantSettings>(
fs.clone(),
cx,
move |settings| settings.set_model(model),
);
}
},
);
}
menu
})
.into()
})
.trigger(
ButtonLike::new("active-model")
.style(ButtonStyle::Subtle)
.child(
h_flex()
.w_full()
.gap_0p5()
.child(
div()
.overflow_x_hidden()
.flex_grow()
.whitespace_nowrap()
.child(
Label::new(
CompletionProvider::global(cx).model().display_name(),
)
.size(LabelSize::Small)
.color(Color::Muted),
),
)
.child(
Icon::new(IconName::ChevronDown)
.color(Color::Muted)
.size(IconSize::XSmall),
),
)
.tooltip(move |cx| {
Tooltip::for_action("Change Model", &ToggleModelSelector, cx)
}),
)
.attach(gpui::AnchorCorner::BottomLeft)
}
}


@@ -1,7 +1,9 @@
use crate::{
slash_command::SlashCommandCompletionProvider, AssistantPanel, InlineAssist, InlineAssistant,
slash_command::SlashCommandCompletionProvider, AssistantPanel, CompletionProvider,
InlineAssist, InlineAssistant, LanguageModelRequest, LanguageModelRequestMessage, Role,
};
use anyhow::{anyhow, Result};
use assistant_slash_command::SlashCommandRegistry;
use chrono::{DateTime, Utc};
use collections::{HashMap, HashSet};
use editor::{actions::Tab, CurrentLineHighlight, Editor, EditorElement, EditorEvent, EditorStyle};
@@ -15,14 +17,8 @@ use gpui::{
Global, HighlightStyle, PromptLevel, ReadGlobal, Subscription, Task, TextStyle,
TitlebarOptions, UpdateGlobal, View, WindowBounds, WindowHandle, WindowOptions,
};
use heed::{
types::{SerdeBincode, SerdeJson, Str},
Database, RoTxn,
};
use heed::{types::SerdeBincode, Database, RoTxn};
use language::{language_settings::SoftWrap, Buffer, LanguageRegistry};
use language_model::{
LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role,
};
use parking_lot::RwLock;
use picker::{Picker, PickerDelegate};
use rope::Rope;
@@ -35,7 +31,6 @@ use std::{
sync::{atomic::AtomicBool, Arc},
time::Duration,
};
use text::LineEnding;
use theme::ThemeSettings;
use ui::{
div, prelude::*, IconButtonShape, ListItem, ListItemSpacing, ParentElement, Render,
@@ -66,11 +61,6 @@ pub fn init(cx: &mut AppContext) {
cx.set_global(GlobalPromptStore(prompt_store_future))
}
const BUILT_IN_TOOLTIP_TEXT: &'static str = concat!(
"This prompt supports special functionality.\n",
"It's read-only, but you can remove it from your default prompt."
);
/// This function opens a new prompt library window if one doesn't exist already.
/// If one exists, it brings it to the foreground.
///
@@ -238,29 +228,15 @@ impl PickerDelegate for PromptPickerDelegate {
.end_hover_slot(
h_flex()
.gap_2()
.child(if prompt_id.is_built_in() {
div()
.id("built-in-prompt")
.child(Icon::new(IconName::FileLock).color(Color::Muted))
.tooltip(move |cx| {
Tooltip::with_meta(
"Built-in prompt",
None,
BUILT_IN_TOOLTIP_TEXT,
cx,
)
})
.into_any()
} else {
.child(
IconButton::new("delete-prompt", IconName::Trash)
.icon_color(Color::Muted)
.shape(IconButtonShape::Square)
.tooltip(move |cx| Tooltip::text("Delete Prompt", cx))
.on_click(cx.listener(move |_, _, cx| {
cx.emit(PromptPickerEvent::Deleted { prompt_id })
}))
.into_any_element()
})
})),
)
.child(
IconButton::new("toggle-default-prompt", IconName::Sparkle)
.selected(default)
@@ -293,7 +269,7 @@ impl PickerDelegate for PromptPickerDelegate {
.flex_none()
.py_1()
.px_2()
.mx_1()
.mx_2()
.child(editor.clone())
}
}
@@ -373,10 +349,6 @@ impl PromptLibrary {
pub fn save_prompt(&mut self, prompt_id: PromptId, cx: &mut ViewContext<Self>) {
const SAVE_THROTTLE: Duration = Duration::from_millis(500);
if prompt_id.is_built_in() {
return;
}
let prompt_metadata = self.store.metadata(prompt_id).unwrap();
let prompt_editor = self.prompt_editors.get_mut(&prompt_id).unwrap();
let title = prompt_editor.title_editor.read(cx).text(cx);
@@ -476,6 +448,7 @@ impl PromptLibrary {
self.set_active_prompt(Some(prompt_id), cx);
} else if let Some(prompt_metadata) = self.store.metadata(prompt_id) {
let language_registry = self.language_registry.clone();
let commands = SlashCommandRegistry::global(cx);
let prompt = self.store.load(prompt_id);
self.pending_load = cx.spawn(|this, mut cx| async move {
let prompt = prompt.await;
@@ -486,7 +459,6 @@ impl PromptLibrary {
let mut editor = Editor::auto_width(cx);
editor.set_placeholder_text("Untitled", cx);
editor.set_text(prompt_metadata.title.unwrap_or_default(), cx);
editor.set_read_only(prompt_id.is_built_in());
editor
});
let body_editor = cx.new_view(|cx| {
@@ -498,7 +470,6 @@ impl PromptLibrary {
});
let mut editor = Editor::for_buffer(buffer, None, cx);
editor.set_read_only(prompt_id.is_built_in());
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
editor.set_show_gutter(false, cx);
editor.set_show_wrap_guides(false, cx);
@@ -506,7 +477,7 @@ impl PromptLibrary {
editor.set_use_modal_editing(false);
editor.set_current_line_highlight(Some(CurrentLineHighlight::None));
editor.set_completion_provider(Box::new(
SlashCommandCompletionProvider::new(None, None),
SlashCommandCompletionProvider::new(commands, None, None),
));
if focus {
editor.focus(cx);
@@ -658,21 +629,17 @@ impl PromptLibrary {
self.picker.update(cx, |picker, cx| picker.focus(cx));
}
pub fn inline_assist(&mut self, action: &InlineAssist, cx: &mut ViewContext<Self>) {
pub fn inline_assist(&mut self, _: &InlineAssist, cx: &mut ViewContext<Self>) {
let Some(active_prompt_id) = self.active_prompt_id else {
cx.propagate();
return;
};
let prompt_editor = &self.prompt_editors[&active_prompt_id].body_editor;
let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
return;
};
let initial_prompt = action.prompt.clone();
if provider.is_authenticated(cx) {
let provider = CompletionProvider::global(cx);
if provider.is_authenticated() {
InlineAssistant::update_global(cx, |assistant, cx| {
assistant.assist(&prompt_editor, None, None, initial_prompt, cx)
assistant.assist(&prompt_editor, None, None, cx)
})
} else {
for window in cx.windows() {
@@ -757,9 +724,6 @@ impl PromptLibrary {
}
fn count_tokens(&mut self, prompt_id: PromptId, cx: &mut ViewContext<Self>) {
let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
return;
};
if let Some(prompt) = self.prompt_editors.get_mut(&prompt_id) {
let editor = &prompt.body_editor.read(cx);
let buffer = &editor.buffer().read(cx).as_singleton().unwrap().read(cx);
@@ -771,8 +735,11 @@ impl PromptLibrary {
cx.background_executor().timer(DEBOUNCE_TIMEOUT).await;
let token_count = cx
.update(|cx| {
model.count_tokens(
let provider = CompletionProvider::global(cx);
let model = provider.model();
provider.count_tokens(
LanguageModelRequest {
model,
messages: vec![LanguageModelRequestMessage {
role: Role::System,
content: body.to_string(),
@@ -784,7 +751,6 @@ impl PromptLibrary {
)
})?
.await?;
this.update(&mut cx, |this, cx| {
let prompt_editor = this.prompt_editors.get_mut(&prompt_id).unwrap();
prompt_editor.token_count = Some(token_count);
@@ -802,7 +768,6 @@ impl PromptLibrary {
.capture_action(cx.listener(Self::focus_active_prompt))
.bg(cx.theme().colors().panel_background)
.h_full()
.px_1()
.w_1_3()
.overflow_x_hidden()
.child(
@@ -839,7 +804,7 @@ impl PromptLibrary {
let prompt_metadata = self.store.metadata(prompt_id)?;
let prompt_editor = &self.prompt_editors[&prompt_id];
let focus_handle = prompt_editor.body_editor.focus_handle(cx);
let model = LanguageModelRegistry::read_global(cx).active_model();
let current_model = CompletionProvider::global(cx).model();
let settings = ThemeSettings::get_global(cx);
Some(
@@ -949,12 +914,8 @@ impl PromptLibrary {
None,
format!(
"Model: {}",
model
.as_ref()
.map(|model| model
.name()
.0)
.unwrap_or_default()
current_model
.display_name()
),
cx,
)
@@ -968,23 +929,7 @@ impl PromptLibrary {
)
},
))
.child(if prompt_id.is_built_in() {
div()
.id("built-in-prompt")
.child(
Icon::new(IconName::FileLock)
.color(Color::Muted),
)
.tooltip(move |cx| {
Tooltip::with_meta(
"Built-in prompt",
None,
BUILT_IN_TOOLTIP_TEXT,
cx,
)
})
.into_any()
} else {
.child(
IconButton::new(
"delete-prompt",
IconName::Trash,
@@ -1002,9 +947,8 @@ impl PromptLibrary {
})
.on_click(|_, cx| {
cx.dispatch_action(Box::new(DeletePrompt));
})
.into_any_element()
})
}),
)
.child(
IconButton::new(
"duplicate-prompt",
@@ -1107,30 +1051,20 @@ pub struct PromptMetadata {
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(tag = "kind")]
pub enum PromptId {
User { uuid: Uuid },
EditWorkflow,
}
pub struct PromptId(Uuid);
impl PromptId {
pub fn new() -> PromptId {
PromptId::User {
uuid: Uuid::new_v4(),
}
}
pub fn is_built_in(&self) -> bool {
!matches!(self, PromptId::User { .. })
PromptId(Uuid::new_v4())
}
}
pub struct PromptStore {
executor: BackgroundExecutor,
env: heed::Env,
bodies: Database<SerdeBincode<PromptId>, SerdeBincode<String>>,
metadata: Database<SerdeBincode<PromptId>, SerdeBincode<PromptMetadata>>,
metadata_cache: RwLock<MetadataCache>,
metadata: Database<SerdeJson<PromptId>, SerdeJson<PromptMetadata>>,
bodies: Database<SerdeJson<PromptId>, Str>,
}
#[derive(Default)]
@@ -1141,7 +1075,7 @@ struct MetadataCache {
impl MetadataCache {
fn from_db(
db: Database<SerdeJson<PromptId>, SerdeJson<PromptMetadata>>,
db: Database<SerdeBincode<PromptId>, SerdeBincode<PromptMetadata>>,
txn: &RoTxn,
) -> Result<Self> {
let mut cache = MetadataCache::default();
@@ -1193,123 +1127,35 @@ impl PromptStore {
let db_env = unsafe {
heed::EnvOpenOptions::new()
.map_size(1024 * 1024 * 1024) // 1GB
.max_dbs(4) // Metadata and bodies (possibly v1 of both as well)
.max_dbs(2) // bodies and metadata
.open(db_path)?
};
let mut txn = db_env.write_txn()?;
let metadata = db_env.create_database(&mut txn, Some("metadata.v2"))?;
let bodies = db_env.create_database(&mut txn, Some("bodies.v2"))?;
// Remove edit workflow prompt, as we decided to opt into it using
// a slash command instead.
metadata.delete(&mut txn, &PromptId::EditWorkflow).ok();
bodies.delete(&mut txn, &PromptId::EditWorkflow).ok();
txn.commit()?;
Self::upgrade_dbs(&db_env, metadata, bodies).log_err();
let txn = db_env.read_txn()?;
let bodies = db_env.create_database(&mut txn, Some("bodies"))?;
let metadata = db_env.create_database(&mut txn, Some("metadata"))?;
let metadata_cache = MetadataCache::from_db(metadata, &txn)?;
txn.commit()?;
Ok(PromptStore {
executor,
env: db_env,
metadata_cache: RwLock::new(metadata_cache),
metadata,
bodies,
metadata,
metadata_cache: RwLock::new(metadata_cache),
})
}
})
}
fn upgrade_dbs(
env: &heed::Env,
metadata_db: heed::Database<SerdeJson<PromptId>, SerdeJson<PromptMetadata>>,
bodies_db: heed::Database<SerdeJson<PromptId>, Str>,
) -> Result<()> {
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
pub struct PromptIdV1(Uuid);
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PromptMetadataV1 {
pub id: PromptIdV1,
pub title: Option<SharedString>,
pub default: bool,
pub saved_at: DateTime<Utc>,
}
let mut txn = env.write_txn()?;
let Some(bodies_v1_db) = env
.open_database::<SerdeBincode<PromptIdV1>, SerdeBincode<String>>(
&txn,
Some("bodies"),
)?
else {
return Ok(());
};
let mut bodies_v1 = bodies_v1_db
.iter(&txn)?
.collect::<heed::Result<HashMap<_, _>>>()?;
let Some(metadata_v1_db) = env
.open_database::<SerdeBincode<PromptIdV1>, SerdeBincode<PromptMetadataV1>>(
&txn,
Some("metadata"),
)?
else {
return Ok(());
};
let metadata_v1 = metadata_v1_db
.iter(&txn)?
.collect::<heed::Result<HashMap<_, _>>>()?;
for (prompt_id_v1, metadata_v1) in metadata_v1 {
let prompt_id_v2 = PromptId::User {
uuid: prompt_id_v1.0,
};
let Some(body_v1) = bodies_v1.remove(&prompt_id_v1) else {
continue;
};
if metadata_db
.get(&txn, &prompt_id_v2)?
.map_or(true, |metadata_v2| {
metadata_v1.saved_at > metadata_v2.saved_at
})
{
metadata_db.put(
&mut txn,
&prompt_id_v2,
&PromptMetadata {
id: prompt_id_v2,
title: metadata_v1.title.clone(),
default: metadata_v1.default,
saved_at: metadata_v1.saved_at,
},
)?;
bodies_db.put(&mut txn, &prompt_id_v2, &body_v1)?;
}
}
txn.commit()?;
Ok(())
}
pub fn load(&self, id: PromptId) -> Task<Result<String>> {
let env = self.env.clone();
let bodies = self.bodies;
self.executor.spawn(async move {
let txn = env.read_txn()?;
let mut prompt = bodies
bodies
.get(&txn, &id)?
.ok_or_else(|| anyhow!("prompt not found"))?
.into();
LineEnding::normalize(&mut prompt);
Ok(prompt)
.ok_or_else(|| anyhow!("prompt not found"))
})
}
@@ -1398,10 +1244,6 @@ impl PromptStore {
default: bool,
body: Rope,
) -> Task<Result<()>> {
if id.is_built_in() {
return Task::ready(Err(anyhow!("built-in prompts cannot be saved")));
}
let prompt_metadata = PromptMetadata {
id,
title,
@@ -1429,26 +1271,16 @@ impl PromptStore {
fn save_metadata(
&self,
id: PromptId,
mut title: Option<SharedString>,
title: Option<SharedString>,
default: bool,
) -> Task<Result<()>> {
let mut cache = self.metadata_cache.write();
if id.is_built_in() {
title = cache
.metadata_by_id
.get(&id)
.and_then(|metadata| metadata.title.clone());
}
let prompt_metadata = PromptMetadata {
id,
title,
default,
saved_at: Utc::now(),
};
cache.insert(prompt_metadata.clone());
self.metadata_cache.write().insert(prompt_metadata.clone());
let db_connection = self.env.clone();
let metadata = self.metadata;


@@ -1,245 +1,135 @@
use assets::Assets;
use fs::Fs;
use futures::StreamExt;
use handlebars::{Handlebars, RenderError, TemplateError};
use language::BufferSnapshot;
use parking_lot::Mutex;
use serde::Serialize;
use std::{ops::Range, sync::Arc, time::Duration};
use util::ResultExt;
use std::{fmt::Write, ops::Range};
#[derive(Serialize)]
pub struct ContentPromptContext {
pub content_type: String,
pub language_name: Option<String>,
pub is_insert: bool,
pub is_truncated: bool,
pub document_content: String,
pub user_prompt: String,
pub rewrite_section: Option<String>,
}
pub fn generate_content_prompt(
user_prompt: String,
language_name: Option<&str>,
buffer: BufferSnapshot,
range: Range<usize>,
_project_name: Option<String>,
) -> anyhow::Result<String> {
let mut prompt = String::new();
#[derive(Serialize)]
pub struct TerminalAssistantPromptContext {
pub os: String,
pub arch: String,
pub shell: Option<String>,
pub working_directory: Option<String>,
pub latest_output: Vec<String>,
pub user_prompt: String,
}
pub struct PromptBuilder {
handlebars: Arc<Mutex<Handlebars<'static>>>,
}
impl PromptBuilder {
pub fn new(
fs_and_cx: Option<(Arc<dyn Fs>, &gpui::AppContext)>,
) -> Result<Self, Box<TemplateError>> {
let mut handlebars = Handlebars::new();
Self::register_templates(&mut handlebars)?;
let handlebars = Arc::new(Mutex::new(handlebars));
if let Some((fs, cx)) = fs_and_cx {
Self::watch_fs_for_template_overrides(fs, cx, handlebars.clone());
let content_type = match language_name {
None | Some("Markdown" | "Plain Text") => {
writeln!(
prompt,
"Here's a file of text that I'm going to ask you to make an edit to."
)?;
"text"
}
Ok(Self { handlebars })
}
fn watch_fs_for_template_overrides(
fs: Arc<dyn Fs>,
cx: &gpui::AppContext,
handlebars: Arc<Mutex<Handlebars<'static>>>,
) {
let templates_dir = paths::prompt_templates_dir();
cx.background_executor()
.spawn(async move {
// Create the prompt templates directory if it doesn't exist
if !fs.is_dir(templates_dir).await {
if let Err(e) = fs.create_dir(templates_dir).await {
log::error!("Failed to create prompt templates directory: {}", e);
return;
}
}
// Initial scan of the prompts directory
if let Ok(mut entries) = fs.read_dir(templates_dir).await {
while let Some(Ok(file_path)) = entries.next().await {
if file_path.to_string_lossy().ends_with(".hbs") {
if let Ok(content) = fs.load(&file_path).await {
let file_name = file_path.file_stem().unwrap().to_string_lossy();
match handlebars.lock().register_template_string(&file_name, content) {
Ok(_) => {
log::info!(
"Successfully registered template override: {} ({})",
file_name,
file_path.display()
);
},
Err(e) => {
log::error!(
"Failed to register template during initial scan: {} ({})",
e,
file_path.display()
);
},
}
}
}
}
}
// Watch for changes
let (mut changes, watcher) = fs.watch(templates_dir, Duration::from_secs(1)).await;
while let Some(changed_paths) = changes.next().await {
for changed_path in changed_paths {
if changed_path.extension().map_or(false, |ext| ext == "hbs") {
log::info!("Reloading template: {}", changed_path.display());
if let Some(content) = fs.load(&changed_path).await.log_err() {
let file_name = changed_path.file_stem().unwrap().to_string_lossy();
let file_path = changed_path.to_string_lossy();
match handlebars.lock().register_template_string(&file_name, content) {
Ok(_) => log::info!(
"Successfully reloaded template: {} ({})",
file_name,
file_path
),
Err(e) => log::error!(
"Failed to register template: {} ({})",
e,
file_path
),
}
}
}
}
}
drop(watcher);
})
.detach();
}
fn register_templates(handlebars: &mut Handlebars) -> Result<(), Box<TemplateError>> {
let mut register_template = |id: &str| {
let prompt = Assets::get(&format!("prompts/{}.hbs", id))
.unwrap_or_else(|| panic!("{} prompt template not found", id))
.data;
handlebars
.register_template_string(id, String::from_utf8_lossy(&prompt))
.map_err(Box::new)
};
register_template("content_prompt")?;
register_template("terminal_assistant_prompt")?;
register_template("edit_workflow")?;
register_template("step_resolution")?;
Ok(())
}
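
Aside (not part of the diff): register_templates above just loads the bundled prompts/*.hbs assets and renders them against a serializable context struct. The sketch below is a minimal, self-contained illustration of that Handlebars flow; the template string and data values are invented and much simpler than the real content_prompt.hbs asset.

// Minimal sketch of the register-then-render flow used by PromptBuilder above.
// The template text here is invented for illustration only.
use handlebars::Handlebars;
use serde::Serialize;

#[derive(Serialize)]
struct DemoContext {
    content_type: String,
    language_name: Option<String>,
    user_prompt: String,
    document_content: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut handlebars = Handlebars::new();
    handlebars.register_template_string(
        "content_prompt",
        "Here's a file of {{#if language_name}}{{language_name}}{{else}}text{{/if}} to edit.\n\
         <document>\n{{{document_content}}}\n</document>\n\
         Edit the {{content_type}} based on this prompt: {{user_prompt}}\n",
    )?;

    let rendered = handlebars.render(
        "content_prompt",
        &DemoContext {
            content_type: "code".into(),
            language_name: Some("Rust".into()),
            user_prompt: "add overflow checking".into(),
            document_content: "fn add(a: i32, b: i32) -> i32 { a + b }".into(),
        },
    )?;
    println!("{rendered}");
    Ok(())
}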
pub fn generate_content_prompt(
&self,
user_prompt: String,
language_name: Option<&str>,
buffer: BufferSnapshot,
range: Range<usize>,
) -> Result<String, RenderError> {
let content_type = match language_name {
None | Some("Markdown" | "Plain Text") => "text",
Some(_) => "code",
};
const MAX_CTX: usize = 50000;
let is_insert = range.is_empty();
let mut is_truncated = false;
let before_range = 0..range.start;
let truncated_before = if before_range.len() > MAX_CTX {
is_truncated = true;
range.start - MAX_CTX..range.start
} else {
before_range
};
let after_range = range.end..buffer.len();
let truncated_after = if after_range.len() > MAX_CTX {
is_truncated = true;
range.end..range.end + MAX_CTX
} else {
after_range
};
let mut document_content = String::new();
for chunk in buffer.text_for_range(truncated_before) {
document_content.push_str(chunk);
Some(language_name) => {
writeln!(
prompt,
"Here's a file of {language_name} that I'm going to ask you to make an edit to."
)?;
"code"
}
if is_insert {
document_content.push_str("<insert_here></insert_here>");
} else {
document_content.push_str("<rewrite_this>\n");
};
const MAX_CTX: usize = 50000;
let mut is_truncated = false;
if range.is_empty() {
prompt.push_str("The point you'll need to insert at is marked with <insert_here></insert_here>.\n\n<document>");
} else {
prompt.push_str("The section you'll need to rewrite is marked with <rewrite_this></rewrite_this> tags.\n\n<document>");
}
// Include file content.
let before_range = 0..range.start;
let truncated_before = if before_range.len() > MAX_CTX {
is_truncated = true;
range.start - MAX_CTX..range.start
} else {
before_range
};
let mut non_rewrite_len = truncated_before.len();
for chunk in buffer.text_for_range(truncated_before) {
prompt.push_str(chunk);
}
if !range.is_empty() {
prompt.push_str("<rewrite_this>\n");
for chunk in buffer.text_for_range(range.clone()) {
prompt.push_str(chunk);
}
prompt.push_str("\n<rewrite_this>");
} else {
prompt.push_str("<insert_here></insert_here>");
}
let after_range = range.end..buffer.len();
let truncated_after = if after_range.len() > MAX_CTX {
is_truncated = true;
range.end..range.end + MAX_CTX
} else {
after_range
};
non_rewrite_len += truncated_after.len();
for chunk in buffer.text_for_range(truncated_after) {
prompt.push_str(chunk);
}
write!(prompt, "</document>\n\n").unwrap();
if is_truncated {
writeln!(prompt, "The context around the relevant section has been truncated (possibly in the middle of a line) for brevity.\n")?;
}
if range.is_empty() {
writeln!(
prompt,
"You can't replace {content_type}, your answer will be inserted in place of the `<insert_here></insert_here>` tags. Don't include the insert_here tags in your output.",
)
.unwrap();
writeln!(
prompt,
"Generate {content_type} based on the following prompt:\n\n<prompt>\n{user_prompt}\n</prompt>",
)
.unwrap();
writeln!(prompt, "Match the indentation in the original file in the inserted {content_type}, don't include any indentation on blank lines.\n").unwrap();
prompt.push_str("Immediately start with the following format with no remarks:\n\n```\n{{INSERTED_CODE}}\n```");
} else {
writeln!(prompt, "Edit the section of {content_type} in <rewrite_this></rewrite_this> tags based on the following prompt:'").unwrap();
writeln!(prompt, "\n<prompt>\n{user_prompt}\n</prompt>\n").unwrap();
let rewrite_len = range.end - range.start;
if rewrite_len < 20000 && rewrite_len * 2 < non_rewrite_len {
writeln!(prompt, "And here's the section to rewrite based on that prompt again for reference:\n\n<rewrite_this>\n").unwrap();
for chunk in buffer.text_for_range(range.clone()) {
document_content.push_str(chunk);
prompt.push_str(chunk);
}
document_content.push_str("\n</rewrite_this>");
writeln!(prompt, "\n</rewrite_this>\n").unwrap();
}
for chunk in buffer.text_for_range(truncated_after) {
document_content.push_str(chunk);
}
let rewrite_section = if !is_insert {
let mut section = String::new();
for chunk in buffer.text_for_range(range.clone()) {
section.push_str(chunk);
}
Some(section)
} else {
None
};
let context = ContentPromptContext {
content_type: content_type.to_string(),
language_name: language_name.map(|s| s.to_string()),
is_insert,
is_truncated,
document_content,
user_prompt,
rewrite_section,
};
self.handlebars.lock().render("content_prompt", &context)
writeln!(prompt, "Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {content_type} will be preserved.\n").unwrap();
write!(
prompt,
"Start at the indentation level in the original file in the rewritten {content_type}. "
)
.unwrap();
prompt.push_str("Don't stop until you've rewritten the entire section, even if you have no more changes to make, always write out the whole section with no unnecessary elisions.");
prompt.push_str("\n\nImmediately start with the following format with no remarks:\n\n```\n{{REWRITTEN_CODE}}\n```");
}
pub fn generate_terminal_assistant_prompt(
&self,
user_prompt: &str,
shell: Option<&str>,
working_directory: Option<&str>,
latest_output: &[String],
) -> Result<String, RenderError> {
let context = TerminalAssistantPromptContext {
os: std::env::consts::OS.to_string(),
arch: std::env::consts::ARCH.to_string(),
shell: shell.map(|s| s.to_string()),
working_directory: working_directory.map(|s| s.to_string()),
latest_output: latest_output.to_vec(),
user_prompt: user_prompt.to_string(),
};
self.handlebars
.lock()
.render("terminal_assistant_prompt", &context)
}
pub fn generate_workflow_prompt(&self) -> Result<String, RenderError> {
self.handlebars.lock().render("edit_workflow", &())
}
pub fn generate_step_resolution_prompt(&self) -> Result<String, RenderError> {
self.handlebars.lock().render("step_resolution", &())
}
Ok(prompt)
}
pub fn generate_terminal_assistant_prompt(
user_prompt: &str,
shell: Option<&str>,
working_directory: Option<&str>,
) -> String {
let mut prompt = String::new();
writeln!(&mut prompt, "You are an expert terminal user.").unwrap();
writeln!(&mut prompt, "You will be given a description of a command and you need to respond with a command that matches the description.").unwrap();
writeln!(&mut prompt, "Do not include markdown blocks or any other text formatting in your response, always respond with a single command that can be executed in the given shell.").unwrap();
if let Some(shell) = shell {
writeln!(&mut prompt, "Current shell is '{shell}'.").unwrap();
}
if let Some(working_directory) = working_directory {
writeln!(
&mut prompt,
"Current working directory is '{working_directory}'."
)
.unwrap();
}
writeln!(&mut prompt, "Here is the description of the command:").unwrap();
prompt.push_str(user_prompt);
prompt
}
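
Aside (not part of the diff): both the templated and the string-building versions of generate_content_prompt above clamp the text surrounding the selection to MAX_CTX bytes on each side. Below is a stand-alone restatement of that windowing arithmetic; the function name and example values are invented for illustration.

use std::ops::Range;

/// Keep at most `max_ctx` bytes of context before and after the selected
/// range, and report whether anything was cut off (mirrors the MAX_CTX
/// handling in generate_content_prompt above).
fn truncated_context(
    buffer_len: usize,
    selection: Range<usize>,
    max_ctx: usize,
) -> (Range<usize>, Range<usize>, bool) {
    let mut is_truncated = false;

    let before = if selection.start > max_ctx {
        is_truncated = true;
        selection.start - max_ctx..selection.start
    } else {
        0..selection.start
    };

    let after = if buffer_len - selection.end > max_ctx {
        is_truncated = true;
        selection.end..selection.end + max_ctx
    } else {
        selection.end..buffer_len
    };

    (before, after, is_truncated)
}

fn main() {
    // A 200 KB buffer, a selection near the middle, and a 50 KB window per side.
    let (before, after, truncated) = truncated_context(200_000, 120_000..120_500, 50_000);
    assert_eq!(before, 70_000..120_000);
    assert_eq!(after, 120_500..170_500);
    assert!(truncated);
    println!("before: {before:?}, after: {after:?}, truncated: {truncated}");
}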


@@ -0,0 +1,171 @@
use language::Rope;
use std::ops::Range;
/// Search the given buffer for a run of lines that approximately matches the
/// given needle, ignoring differences in indentation and tolerating small
/// per-line differences between the needle and the buffer.
///
/// Returns the range of byte offsets spanning the entire matched lines of the
/// buffer, or `None` if no sufficiently similar match is found.
pub fn fuzzy_search_lines(haystack: &Rope, needle: &str) -> Option<Range<usize>> {
const SIMILARITY_THRESHOLD: f64 = 0.8;
let mut best_match: Option<(Range<usize>, f64)> = None; // (range, score)
let mut haystack_lines = haystack.chunks().lines();
let mut haystack_line_start = 0;
while let Some(mut haystack_line) = haystack_lines.next() {
let next_haystack_line_start = haystack_line_start + haystack_line.len() + 1;
let mut advanced_to_next_haystack_line = false;
let mut matched = true;
let match_start = haystack_line_start;
let mut match_end = next_haystack_line_start;
let mut match_score = 0.0;
let mut needle_lines = needle.lines().peekable();
while let Some(needle_line) = needle_lines.next() {
let similarity = line_similarity(haystack_line, needle_line);
if similarity >= SIMILARITY_THRESHOLD {
match_end = haystack_lines.offset();
match_score += similarity;
if needle_lines.peek().is_some() {
if let Some(next_haystack_line) = haystack_lines.next() {
advanced_to_next_haystack_line = true;
haystack_line = next_haystack_line;
} else {
matched = false;
break;
}
} else {
break;
}
} else {
matched = false;
break;
}
}
if matched
&& best_match
.as_ref()
.map(|(_, best_score)| match_score > *best_score)
.unwrap_or(true)
{
best_match = Some((match_start..match_end, match_score));
}
if advanced_to_next_haystack_line {
haystack_lines.seek(next_haystack_line_start);
}
haystack_line_start = next_haystack_line_start;
}
best_match.map(|(range, _)| range)
}
/// Calculates the similarity between two lines, ignoring leading and trailing whitespace,
/// using the Jaro-Winkler distance.
///
/// Returns a value between 0.0 and 1.0, where 1.0 indicates an exact match.
fn line_similarity(line1: &str, line2: &str) -> f64 {
strsim::jaro_winkler(line1.trim(), line2.trim())
}
#[cfg(test)]
mod test {
use super::*;
use gpui::{AppContext, Context as _};
use language::Buffer;
use unindent::Unindent as _;
use util::test::marked_text_ranges;
#[gpui::test]
fn test_fuzzy_search_lines(cx: &mut AppContext) {
let (text, expected_ranges) = marked_text_ranges(
&r#"
fn main() {
if a() {
assert_eq!(
1 + 2,
does_not_match,
);
}
println!("hi");
assert_eq!(
1 + 2,
3,
); // this last line does not match
« assert_eq!(
1 + 2,
3,
);
»
« assert_eq!(
"something",
"else",
);
»
}
"#
.unindent(),
false,
);
let buffer = cx.new_model(|cx| Buffer::local(&text, cx));
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
let actual_range = fuzzy_search_lines(
snapshot.as_rope(),
&"
assert_eq!(
1 + 2,
3,
);
"
.unindent(),
)
.unwrap();
assert_eq!(actual_range, expected_ranges[0]);
let actual_range = fuzzy_search_lines(
snapshot.as_rope(),
&"
assert_eq!(
1 + 2,
3,
);
"
.unindent(),
)
.unwrap();
assert_eq!(actual_range, expected_ranges[0]);
let actual_range = fuzzy_search_lines(
snapshot.as_rope(),
&"
asst_eq!(
\"something\",
\"els\"
)
"
.unindent(),
)
.unwrap();
assert_eq!(actual_range, expected_ranges[1]);
let actual_range = fuzzy_search_lines(
snapshot.as_rope(),
&"
assert_eq!(
2 + 1,
3,
);
"
.unindent(),
);
assert_eq!(actual_range, None);
}
}
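
Aside (not part of the diff): the 0.8 SIMILARITY_THRESHOLD is what lets the asst_eq!/assert_eq! case in the test above still match. The snippet below is a tiny self-contained check of the strsim::jaro_winkler scores involved; the example line pairs are invented, and the scores are computed at run time rather than asserted.

// Quick feel for the Jaro-Winkler scores that drive the 0.8 threshold above.
// Requires the `strsim` crate, which fuzzy_search_lines already depends on.
fn main() {
    const SIMILARITY_THRESHOLD: f64 = 0.8;
    let pairs = [
        ("    assert_eq!(", "assert_eq!("),   // same line, different indentation
        ("asst_eq!(", "assert_eq!("),         // small typo, as in the test above
        ("println!(\"hi\");", "assert_eq!("), // unrelated line
    ];
    for (haystack_line, needle_line) in pairs {
        let score = strsim::jaro_winkler(haystack_line.trim(), needle_line.trim());
        println!(
            "{haystack_line:?} vs {needle_line:?}: {score:.3} -> {}",
            if score >= SIMILARITY_THRESHOLD { "match" } else { "no match" }
        );
    }
}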


@@ -27,12 +27,11 @@ pub mod now_command;
pub mod project_command;
pub mod prompt_command;
pub mod search_command;
pub mod symbols_command;
pub mod tabs_command;
pub mod term_command;
pub mod workflow_command;
pub(crate) struct SlashCommandCompletionProvider {
commands: Arc<SlashCommandRegistry>,
cancel_flag: Mutex<Arc<AtomicBool>>,
editor: Option<WeakView<ContextEditor>>,
workspace: Option<WeakView<Workspace>>,
@@ -47,12 +46,14 @@ pub(crate) struct SlashCommandLine {
impl SlashCommandCompletionProvider {
pub fn new(
commands: Arc<SlashCommandRegistry>,
editor: Option<WeakView<ContextEditor>>,
workspace: Option<WeakView<Workspace>>,
) -> Self {
Self {
cancel_flag: Mutex::new(Arc::new(AtomicBool::new(false))),
editor,
commands,
workspace,
}
}
@@ -64,8 +65,8 @@ impl SlashCommandCompletionProvider {
name_range: Range<Anchor>,
cx: &mut WindowContext,
) -> Task<Result<Vec<project::Completion>>> {
let commands = SlashCommandRegistry::global(cx);
let candidates = commands
let candidates = self
.commands
.command_names()
.into_iter()
.enumerate()
@@ -75,6 +76,7 @@ impl SlashCommandCompletionProvider {
char_bag: def.as_ref().into(),
})
.collect::<Vec<_>>();
let commands = self.commands.clone();
let command_name = command_name.to_string();
let editor = self.editor.clone();
let workspace = self.workspace.clone();
@@ -153,8 +155,7 @@ impl SlashCommandCompletionProvider {
flag.store(true, SeqCst);
*flag = new_cancel_flag.clone();
let commands = SlashCommandRegistry::global(cx);
if let Some(command) = commands.command(command_name) {
if let Some(command) = self.commands.command(command_name) {
let completions = command.complete_argument(
argument,
new_cancel_flag.clone(),


@@ -46,7 +46,7 @@ impl SlashCommand for ActiveSlashCommand {
self: Arc<Self>,
_argument: Option<&str>,
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_delegate: Arc<dyn LspAdapterDelegate>,
cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let output = workspace.update(cx, |workspace, cx| {


@@ -44,7 +44,7 @@ impl SlashCommand for DefaultSlashCommand {
self: Arc<Self>,
_argument: Option<&str>,
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_delegate: Arc<dyn LspAdapterDelegate>,
cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let store = PromptStore::global(cx);


@@ -33,7 +33,7 @@ impl DiagnosticsSlashCommand {
if query.is_empty() {
let workspace = workspace.read(cx);
let entries = workspace.recent_navigation_history(Some(10), cx);
let path_prefix: Arc<str> = Arc::default();
let path_prefix: Arc<str> = "".into();
Task::ready(
entries
.into_iter()
@@ -158,7 +158,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
self: Arc<Self>,
argument: Option<&str>,
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_delegate: Arc<dyn LspAdapterDelegate>,
cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let Some(workspace) = workspace.upgrade() else {
@@ -284,7 +284,7 @@ fn collect_diagnostics(
PathBuf::try_from(path)
.ok()
.and_then(|path| {
project.read(cx).worktrees(cx).find_map(|worktree| {
project.read(cx).worktrees().find_map(|worktree| {
let worktree = worktree.read(cx);
let worktree_root_path = Path::new(worktree.root_name());
let relative_path = path.strip_prefix(worktree_root_path).ok()?;


@@ -1,16 +1,14 @@
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use std::time::Duration;
use anyhow::{anyhow, bail, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
};
use gpui::{AppContext, BackgroundExecutor, Model, Task, WeakView};
use gpui::{AppContext, Model, Task, WeakView};
use indexed_docs::{
DocsDotRsProvider, IndexedDocsRegistry, IndexedDocsStore, LocalRustdocProvider, PackageName,
ProviderId,
IndexedDocsRegistry, IndexedDocsStore, LocalProvider, PackageName, ProviderId, RustdocIndexer,
};
use language::LspAdapterDelegate;
use project::{Project, ProjectPath};
@@ -24,7 +22,7 @@ impl DocsSlashCommand {
pub const NAME: &'static str = "docs";
fn path_to_cargo_toml(project: Model<Project>, cx: &mut AppContext) -> Option<Arc<Path>> {
let worktree = project.read(cx).worktrees(cx).next()?;
let worktree = project.read(cx).worktrees().next()?;
let worktree = worktree.read(cx);
let entry = worktree.entry_for_path("Cargo.toml")?;
let path = ProjectPath {
@@ -36,22 +34,22 @@ impl DocsSlashCommand {
))
}
/// Ensures that the indexed doc providers for Rust are registered.
/// Ensures that the rustdoc provider is registered.
///
/// Ideally we would do this sooner, but we need to wait until we're able to
/// access the workspace so we can read the project.
fn ensure_rust_doc_providers_are_registered(
fn ensure_rustdoc_provider_is_registered(
&self,
workspace: Option<WeakView<Workspace>>,
cx: &mut AppContext,
) {
let indexed_docs_registry = IndexedDocsRegistry::global(cx);
if indexed_docs_registry
.get_provider_store(LocalRustdocProvider::id())
.get_provider_store(ProviderId::rustdoc())
.is_none()
{
let index_provider_deps = maybe!({
let workspace = workspace.clone().ok_or_else(|| anyhow!("no workspace"))?;
let workspace = workspace.ok_or_else(|| anyhow!("no workspace"))?;
let workspace = workspace
.upgrade()
.ok_or_else(|| anyhow!("workspace was dropped"))?;
@@ -65,80 +63,11 @@ impl DocsSlashCommand {
});
if let Some((fs, cargo_workspace_root)) = index_provider_deps.log_err() {
indexed_docs_registry.register_provider(Box::new(LocalRustdocProvider::new(
fs,
cargo_workspace_root,
)));
indexed_docs_registry.register_provider(Box::new(RustdocIndexer::new(Box::new(
LocalProvider::new(fs, cargo_workspace_root),
))));
}
}
if indexed_docs_registry
.get_provider_store(DocsDotRsProvider::id())
.is_none()
{
let http_client = maybe!({
let workspace = workspace.ok_or_else(|| anyhow!("no workspace"))?;
let workspace = workspace
.upgrade()
.ok_or_else(|| anyhow!("workspace was dropped"))?;
let project = workspace.read(cx).project().clone();
anyhow::Ok(project.read(cx).client().http_client().clone())
});
if let Some(http_client) = http_client.log_err() {
indexed_docs_registry
.register_provider(Box::new(DocsDotRsProvider::new(http_client)));
}
}
}
/// Runs just-in-time indexing for a given package, in case the slash command
/// is run without any entries existing in the index.
fn run_just_in_time_indexing(
store: Arc<IndexedDocsStore>,
key: String,
package: PackageName,
executor: BackgroundExecutor,
) -> Task<()> {
executor.clone().spawn(async move {
let (prefix, needs_full_index) = if let Some((prefix, _)) = key.split_once('*') {
// If we have a wildcard in the search, we want to wait until
// we've completely finished indexing so we get a full set of
// results for the wildcard.
(prefix.to_string(), true)
} else {
(key, false)
};
// If we already have some entries, we assume that we've indexed the package before
// and don't need to do it again.
let has_any_entries = store
.any_with_prefix(prefix.clone())
.await
.unwrap_or_default();
if has_any_entries {
return ();
};
let index_task = store.clone().index(package.clone());
if needs_full_index {
_ = index_task.await;
} else {
loop {
executor.timer(Duration::from_millis(200)).await;
if store
.any_with_prefix(prefix.clone())
.await
.unwrap_or_default()
|| !store.is_indexing(&package)
{
break;
}
}
}
})
}
}
@@ -166,7 +95,7 @@ impl SlashCommand for DocsSlashCommand {
workspace: Option<WeakView<Workspace>>,
cx: &mut AppContext,
) -> Task<Result<Vec<ArgumentCompletion>>> {
self.ensure_rust_doc_providers_are_registered(workspace, cx);
self.ensure_rustdoc_provider_is_registered(workspace, cx);
let indexed_docs_registry = IndexedDocsRegistry::global(cx);
let args = DocsSlashCommandArgs::parse(&query);
@@ -192,14 +121,6 @@ impl SlashCommand for DocsSlashCommand {
match args {
DocsSlashCommandArgs::NoProvider => {
let providers = indexed_docs_registry.list_providers();
if providers.is_empty() {
return Ok(vec![ArgumentCompletion {
label: "No available docs providers.".to_string(),
new_text: String::new(),
run_command: false,
}]);
}
Ok(providers
.into_iter()
.map(|provider| ArgumentCompletion {
@@ -219,7 +140,7 @@ impl SlashCommand for DocsSlashCommand {
if index {
// We don't need to hold onto this task, as the `IndexedDocsStore` will hold it
// until it completes.
drop(store.clone().index(package.as_str().into()));
let _ = store.clone().index(package.as_str().into());
}
let items = store.search(package).await;
@@ -242,7 +163,7 @@ impl SlashCommand for DocsSlashCommand {
self: Arc<Self>,
argument: Option<&str>,
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_delegate: Arc<dyn LspAdapterDelegate>,
cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let Some(argument) = argument else {
@@ -250,72 +171,46 @@ impl SlashCommand for DocsSlashCommand {
};
let args = DocsSlashCommandArgs::parse(argument);
let executor = cx.background_executor().clone();
let task = cx.background_executor().spawn({
let text = cx.background_executor().spawn({
let store = args
.provider()
.ok_or_else(|| anyhow!("no docs provider specified"))
.and_then(|provider| IndexedDocsStore::try_global(provider, cx));
async move {
let (provider, key) = match args.clone() {
match args {
DocsSlashCommandArgs::NoProvider => bail!("no docs provider specified"),
DocsSlashCommandArgs::SearchPackageDocs {
provider, package, ..
} => (provider, package),
} => {
let store = store?;
let item_docs = store.load(package.clone()).await?;
anyhow::Ok((provider, package, item_docs.to_string()))
}
DocsSlashCommandArgs::SearchItemDocs {
provider,
item_path,
..
} => (provider, item_path),
};
} => {
let store = store?;
let item_docs = store.load(item_path.clone()).await?;
let store = store?;
if let Some(package) = args.package() {
Self::run_just_in_time_indexing(store.clone(), key.clone(), package, executor)
.await;
}
let (text, ranges) = if let Some((prefix, _)) = key.split_once('*') {
let docs = store.load_many_by_prefix(prefix.to_string()).await?;
let mut text = String::new();
let mut ranges = Vec::new();
for (key, docs) in docs {
let prev_len = text.len();
text.push_str(&docs.0);
text.push_str("\n");
ranges.push((key, prev_len..text.len()));
text.push_str("\n");
anyhow::Ok((provider, item_path, item_docs.to_string()))
}
(text, ranges)
} else {
let item_docs = store.load(key.clone()).await?;
let text = item_docs.to_string();
let range = 0..text.len();
(text, vec![(key, range)])
};
anyhow::Ok((provider, text, ranges))
}
}
});
cx.foreground_executor().spawn(async move {
let (provider, text, ranges) = task.await?;
let (provider, path, text) = text.await?;
let range = 0..text.len();
Ok(SlashCommandOutput {
text,
sections: ranges
.into_iter()
.map(|(key, range)| SlashCommandOutputSection {
range,
icon: IconName::FileDoc,
label: format!("docs ({provider}): {key}",).into(),
})
.collect(),
sections: vec![SlashCommandOutputSection {
range,
icon: IconName::FileRust,
label: format!("docs ({provider}): {path}",).into(),
}],
run_commands_in_text: false,
})
})
@@ -326,7 +221,7 @@ fn is_item_path_delimiter(char: char) -> bool {
!char.is_alphanumeric() && char != '-' && char != '_'
}
#[derive(Debug, PartialEq, Clone)]
#[derive(Debug, PartialEq)]
pub(crate) enum DocsSlashCommandArgs {
NoProvider,
SearchPackageDocs {


@@ -10,7 +10,7 @@ use assistant_slash_command::{
use futures::AsyncReadExt;
use gpui::{AppContext, Task, WeakView};
use html_to_markdown::{convert_html_to_markdown, markdown, TagHandler};
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
use http::{AsyncBody, HttpClient, HttpClientWithUrl};
use language::LspAdapterDelegate;
use ui::prelude::*;
use workspace::Workspace;
@@ -27,7 +27,7 @@ pub(crate) struct FetchSlashCommand;
impl FetchSlashCommand {
async fn build_message(http_client: Arc<HttpClientWithUrl>, url: &str) -> Result<String> {
let mut url = url.to_owned();
if !url.starts_with("https://") && !url.starts_with("http://") {
if !url.starts_with("https://") {
url = format!("https://{url}");
}
@@ -129,7 +129,7 @@ impl SlashCommand for FetchSlashCommand {
self: Arc<Self>,
argument: Option<&str>,
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_delegate: Arc<dyn LspAdapterDelegate>,
cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let Some(argument) = argument else {


@@ -29,7 +29,7 @@ impl FileSlashCommand {
let workspace = workspace.read(cx);
let project = workspace.project().read(cx);
let entries = workspace.recent_navigation_history(Some(10), cx);
let path_prefix: Arc<str> = Arc::default();
let path_prefix: Arc<str> = "".into();
Task::ready(
entries
.into_iter()
@@ -136,7 +136,7 @@ impl SlashCommand for FileSlashCommand {
self: Arc<Self>,
argument: Option<&str>,
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_delegate: Arc<dyn LspAdapterDelegate>,
cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let Some(workspace) = workspace.upgrade() else {
@@ -188,7 +188,7 @@ fn collect_files(
let project_handle = project.downgrade();
let snapshots = project
.read(cx)
.worktrees(cx)
.worktrees()
.map(|worktree| worktree.read(cx).snapshot())
.collect::<Vec<_>>();
cx.spawn(|mut cx| async move {
@@ -273,25 +273,20 @@ fn collect_files(
continue;
};
if let Some(buffer) = open_buffer_task.await.log_err() {
let buffer_snapshot =
cx.read_model(&buffer, |buffer, _| buffer.snapshot())?;
let snapshot = cx.read_model(&buffer, |buffer, _| buffer.snapshot())?;
let prev_len = text.len();
collect_file_content(
&mut text,
&buffer_snapshot,
path_including_worktree_name.to_string_lossy().to_string(),
);
collect_file_content(&mut text, &snapshot, filename.clone());
text.push('\n');
if !write_single_file_diagnostics(
&mut text,
Some(&path_including_worktree_name),
&buffer_snapshot,
&snapshot,
) {
text.pop();
}
ranges.push((
prev_len..text.len(),
path_including_worktree_name,
PathBuf::from(filename),
EntryType::File,
));
text.push('\n');


@@ -23,7 +23,7 @@ impl SlashCommand for NowSlashCommand {
}
fn menu_text(&self) -> String {
"Insert Current Date and Time".into()
"Insert current date and time".into()
}
fn requires_argument(&self) -> bool {
@@ -44,7 +44,7 @@ impl SlashCommand for NowSlashCommand {
self: Arc<Self>,
_argument: Option<&str>,
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_delegate: Arc<dyn LspAdapterDelegate>,
_cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let now = Local::now();


@@ -75,7 +75,7 @@ impl ProjectSlashCommand {
}
fn path_to_cargo_toml(project: Model<Project>, cx: &mut AppContext) -> Option<Arc<Path>> {
let worktree = project.read(cx).worktrees(cx).next()?;
let worktree = project.read(cx).worktrees().next()?;
let worktree = worktree.read(cx);
let entry = worktree.entry_for_path("Cargo.toml")?;
let path = ProjectPath {
@@ -119,7 +119,7 @@ impl SlashCommand for ProjectSlashCommand {
self: Arc<Self>,
_argument: Option<&str>,
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_delegate: Arc<dyn LspAdapterDelegate>,
cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let output = workspace.update(cx, |workspace, cx| {

Some files were not shown because too many files have changed in this diff Show More