Compare commits
6 Commits: v0.158.2...buffer-ext

| Author | SHA1 | Date |
|---|---|---|
| | c822188a53 | |
| | 32fe23a182 | |
| | b337f419d3 | |
| | 548878d8a0 | |
| | cdb88f52b7 | |
| | 14b4dc9a3a | |
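The same comparison can be reproduced locally. A minimal sketch, assuming a clone of the zed repository with both refs fetched and `origin` pointing at the upstream repo:

```sh
# Commits reachable from buffer-ext but not from v0.158.2 (the "6 Commits" listed above)
git log --oneline v0.158.2..buffer-ext

# File-by-file changes relative to the merge base, which is what the compare view renders
git diff --stat v0.158.2...buffer-ext
```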
2  .github/actions/run_tests/action.yml

@@ -10,7 +10,7 @@ runs:
        cargo install cargo-nextest

    - name: Install Node
-     uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
+     uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
      with:
        node-version: "18"
23  .github/workflows/bump_collab_staging.yml (file deleted)

@@ -1,23 +0,0 @@
-name: Bump collab-staging Tag
-
-on:
-  schedule:
-    # Fire every day at 16:00 UTC (At the start of the US workday)
-    - cron: "0 16 * * *"
-
-jobs:
-  update-collab-staging-tag:
-    if: github.repository_owner == 'zed-industries'
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
-        with:
-          fetch-depth: 0
-
-      - name: Update collab-staging tag
-        run: |
-          git config user.name github-actions
-          git config user.email github-actions@github.com
-          git tag -f collab-staging
-          git push origin collab-staging --force
7  .github/workflows/bump_patch_version.yml

@@ -15,10 +15,11 @@ concurrency:
 jobs:
   bump_patch_version:
     runs-on:
-      - buildjet-16vcpu-ubuntu-2204
+      - self-hosted
+      - test
     steps:
       - name: Checkout code
-        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
         with:
           ref: ${{ github.event.inputs.branch }}
           ssh-key: ${{ secrets.ZED_BOT_DEPLOY_KEY }}
@@ -41,7 +42,7 @@ jobs:
             exit 1
             ;;
           esac
-          which cargo-set-version > /dev/null || cargo install cargo-edit
+          which cargo-set-version > /dev/null || cargo install cargo-edit --features vendored-openssl
           output=$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //')
           git commit -am "Bump to $output for @$GITHUB_ACTOR" --author "Zed Bot <hi@zed.dev>"
           git tag v${output}${tag_suffix}
201
.github/workflows/ci.yml
vendored
@@ -7,13 +7,9 @@ on:
|
||||
- "v[0-9]+.[0-9]+.x"
|
||||
tags:
|
||||
- "v*"
|
||||
paths-ignore:
|
||||
- "docs/**"
|
||||
pull_request:
|
||||
branches:
|
||||
- "**"
|
||||
paths-ignore:
|
||||
- "docs/**"
|
||||
|
||||
concurrency:
|
||||
# Allow only one workflow per any non-`main` branch.
|
||||
@@ -34,7 +30,7 @@ jobs:
|
||||
- test
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 0
|
||||
@@ -43,7 +39,16 @@ jobs:
|
||||
run: git clean -df
|
||||
|
||||
- name: Check spelling
|
||||
run: script/check-spelling
|
||||
run: |
|
||||
if ! cargo install --list | grep "typos-cli v$TYPOS_CLI_VERSION" > /dev/null; then
|
||||
echo "Installing typos-cli@$TYPOS_CLI_VERSION..."
|
||||
cargo install "typos-cli@$TYPOS_CLI_VERSION"
|
||||
else
|
||||
echo "typos-cli@$TYPOS_CLI_VERSION is already installed."
|
||||
fi
|
||||
typos
|
||||
env:
|
||||
TYPOS_CLI_VERSION: "1.23.3"
|
||||
|
||||
- name: Run style checks
|
||||
uses: ./.github/actions/check_style
|
||||
@@ -85,7 +90,7 @@ jobs:
|
||||
- test
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -96,37 +101,26 @@ jobs:
|
||||
uses: ./.github/actions/run_tests
|
||||
|
||||
- name: Build collab
|
||||
run: RUSTFLAGS="-D warnings" cargo build -p collab
|
||||
run: cargo build -p collab
|
||||
|
||||
- name: Build other binaries and features
|
||||
run: |
|
||||
RUSTFLAGS="-D warnings" cargo build --workspace --bins --all-features
|
||||
cargo check -p gpui --features "macos-blade"
|
||||
RUSTFLAGS="-D warnings" cargo build -p remote_server
|
||||
run: cargo build --workspace --bins --all-features; cargo check -p gpui --features "macos-blade"
|
||||
|
||||
linux_tests:
|
||||
timeout-minutes: 60
|
||||
name: (Linux) Run Clippy and tests
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2204
|
||||
- self-hosted
|
||||
- deploy
|
||||
steps:
|
||||
- name: Add Rust to the PATH
|
||||
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Cache dependencies
|
||||
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
|
||||
with:
|
||||
save-if: ${{ github.ref == 'refs/heads/main' }}
|
||||
cache-provider: "buildjet"
|
||||
|
||||
- name: Install Linux dependencies
|
||||
run: ./script/linux
|
||||
|
||||
- name: cargo clippy
|
||||
run: ./script/clippy
|
||||
|
||||
@@ -134,33 +128,7 @@ jobs:
|
||||
uses: ./.github/actions/run_tests
|
||||
|
||||
- name: Build Zed
|
||||
run: RUSTFLAGS="-D warnings" cargo build -p zed
|
||||
|
||||
build_remote_server:
|
||||
timeout-minutes: 60
|
||||
name: (Linux) Build Remote Server
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2204
|
||||
steps:
|
||||
- name: Add Rust to the PATH
|
||||
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Cache dependencies
|
||||
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
|
||||
with:
|
||||
save-if: ${{ github.ref == 'refs/heads/main' }}
|
||||
cache-provider: "buildjet"
|
||||
|
||||
- name: Install Clang & Mold
|
||||
run: ./script/remote-server && ./script/install-mold 2.34.0
|
||||
|
||||
- name: Build Remote Server
|
||||
run: RUSTFLAGS="-D warnings" cargo build -p remote_server
|
||||
run: cargo build -p zed
|
||||
|
||||
# todo(windows): Actually run the tests
|
||||
windows_tests:
|
||||
@@ -169,7 +137,7 @@ jobs:
|
||||
runs-on: hosted-windows-1
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -177,14 +145,13 @@ jobs:
|
||||
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
|
||||
with:
|
||||
save-if: ${{ github.ref == 'refs/heads/main' }}
|
||||
cache-provider: "github"
|
||||
|
||||
- name: cargo clippy
|
||||
# Windows can't run shell scripts, so we need to use `cargo xtask`.
|
||||
run: cargo xtask clippy
|
||||
|
||||
- name: Build Zed
|
||||
run: $env:RUSTFLAGS="-D warnings"; cargo build
|
||||
run: cargo build -p zed
|
||||
|
||||
bundle-mac:
|
||||
timeout-minutes: 60
|
||||
@@ -205,12 +172,12 @@ jobs:
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
steps:
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
|
||||
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
|
||||
with:
|
||||
node-version: "18"
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
with:
|
||||
# We need to fetch more than one commit so that `script/draft-release-notes`
|
||||
# is able to diff between the current and previous tag.
|
||||
@@ -225,12 +192,29 @@ jobs:
|
||||
- name: Determine version and release channel
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
run: |
|
||||
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
|
||||
script/determine-release-channel
|
||||
set -eu
|
||||
|
||||
- name: Draft release notes
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
run: |
|
||||
version=$(script/get-crate-version zed)
|
||||
channel=$(cat crates/zed/RELEASE_CHANNEL)
|
||||
echo "Publishing version: ${version} on release channel ${channel}"
|
||||
echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV
|
||||
|
||||
expected_tag_name=""
|
||||
case ${channel} in
|
||||
stable)
|
||||
expected_tag_name="v${version}";;
|
||||
preview)
|
||||
expected_tag_name="v${version}-pre";;
|
||||
nightly)
|
||||
expected_tag_name="v${version}-nightly";;
|
||||
*)
|
||||
echo "can't publish a release on channel ${channel}"
|
||||
exit 1;;
|
||||
esac
|
||||
if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then
|
||||
echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}"
|
||||
exit 1
|
||||
fi
|
||||
mkdir -p target/
|
||||
# Ignore any errors that occur while drafting release notes to not fail the build.
|
||||
script/draft-release-notes "$version" "$channel" > target/release-notes.md || true
|
||||
@@ -248,20 +232,20 @@ jobs:
|
||||
mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg
|
||||
|
||||
- name: Upload app bundle (universal) to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@604373da6381bf24206979c74d06a550515601b9 # v4
|
||||
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
with:
|
||||
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg
|
||||
path: target/release/Zed.dmg
|
||||
- name: Upload app bundle (aarch64) to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@604373da6381bf24206979c74d06a550515601b9 # v4
|
||||
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
with:
|
||||
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
|
||||
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
|
||||
|
||||
- name: Upload app bundle (x86_64) to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@604373da6381bf24206979c74d06a550515601b9 # v4
|
||||
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
with:
|
||||
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
|
||||
@@ -287,32 +271,57 @@ jobs:
|
||||
timeout-minutes: 60
|
||||
name: Create a Linux bundle
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2004
|
||||
- self-hosted
|
||||
- deploy
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
needs: [linux_tests]
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Add Rust to the PATH
|
||||
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Install Linux dependencies
|
||||
run: ./script/linux && ./script/install-mold 2.34.0
|
||||
- name: Limit target directory size
|
||||
run: script/clear-target-dir-if-larger-than 100
|
||||
|
||||
- name: Determine version and release channel
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
run: |
|
||||
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
|
||||
script/determine-release-channel
|
||||
set -eu
|
||||
|
||||
version=$(script/get-crate-version zed)
|
||||
channel=$(cat crates/zed/RELEASE_CHANNEL)
|
||||
echo "Publishing version: ${version} on release channel ${channel}"
|
||||
echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV
|
||||
|
||||
expected_tag_name=""
|
||||
case ${channel} in
|
||||
stable)
|
||||
expected_tag_name="v${version}";;
|
||||
preview)
|
||||
expected_tag_name="v${version}-pre";;
|
||||
nightly)
|
||||
expected_tag_name="v${version}-nightly";;
|
||||
*)
|
||||
echo "can't publish a release on channel ${channel}"
|
||||
exit 1;;
|
||||
esac
|
||||
if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then
|
||||
echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Create Linux .tar.gz bundle
|
||||
run: script/bundle-linux
|
||||
|
||||
- name: Upload Linux bundle to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@604373da6381bf24206979c74d06a550515601b9 # v4
|
||||
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
with:
|
||||
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
|
||||
@@ -334,7 +343,7 @@ jobs:
|
||||
timeout-minutes: 60
|
||||
name: Create arm64 Linux bundle
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2204-arm
|
||||
- hosted-linux-arm-1
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
needs: [linux_tests]
|
||||
env:
|
||||
@@ -342,24 +351,62 @@ jobs:
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
with:
|
||||
clean: false
|
||||
- name: "Setup jq"
|
||||
uses: dcarbone/install-jq-action@8867ddb4788346d7c22b72ea2e2ffe4d514c7bcb # v2
|
||||
|
||||
- name: Install Linux dependencies
|
||||
run: ./script/linux
|
||||
- name: Set up Clang
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y llvm-15 clang-15 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
|
||||
echo "/usr/lib/llvm-15/bin" >> $GITHUB_PATH
|
||||
|
||||
- uses: rui314/setup-mold@0bf4f07ef9048ec62a45f9dbf2f098afa49695f0 # v1
|
||||
with:
|
||||
mold-version: 2.32.0
|
||||
|
||||
- name: rustup
|
||||
run: |
|
||||
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
|
||||
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Limit target directory size
|
||||
run: script/clear-target-dir-if-larger-than 100
|
||||
|
||||
- name: Determine version and release channel
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
run: |
|
||||
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
|
||||
script/determine-release-channel
|
||||
set -eu
|
||||
|
||||
version=$(script/get-crate-version zed)
|
||||
channel=$(cat crates/zed/RELEASE_CHANNEL)
|
||||
echo "Publishing version: ${version} on release channel ${channel}"
|
||||
echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV
|
||||
|
||||
expected_tag_name=""
|
||||
case ${channel} in
|
||||
stable)
|
||||
expected_tag_name="v${version}";;
|
||||
preview)
|
||||
expected_tag_name="v${version}-pre";;
|
||||
nightly)
|
||||
expected_tag_name="v${version}-nightly";;
|
||||
*)
|
||||
echo "can't publish a release on channel ${channel}"
|
||||
exit 1;;
|
||||
esac
|
||||
if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then
|
||||
echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Create and upload Linux .tar.gz bundle
|
||||
run: script/bundle-linux
|
||||
|
||||
- name: Upload Linux bundle to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@604373da6381bf24206979c74d06a550515601b9 # v4
|
||||
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
with:
|
||||
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
|
||||
|
||||
31  .github/workflows/close_stale_issues.yml (file deleted)

@@ -1,31 +0,0 @@
-name: "Close Stale Issues"
-on:
-  schedule:
-    - cron: "0 11 * * 2"
-  workflow_dispatch:
-
-jobs:
-  stale:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9
-        with:
-          repo-token: ${{ secrets.GITHUB_TOKEN }}
-          stale-issue-message: >
-            Hi there! 👋
-
-            We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in 7 days. Feel free to open a new issue if you're seeing this message after the issue has been closed.
-
-            Thanks for your help!
-          close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, feel free to ping a Zed team member to reopen this issue or open a new one."
-          # We will increase `days-before-stale` to 365 on or after Jan 24th,
-          # 2024. This date marks one year since migrating issues from
-          # 'community' to 'zed' repository. The migration added activity to all
-          # issues, preventing 365 days from working until then.
-          days-before-stale: 180
-          days-before-close: 7
-          any-of-issue-labels: "defect,panic / crash"
-          operations-per-run: 1000
-          ascending: true
-          enable-statistics: true
-          stale-issue-label: "stale"
4  .github/workflows/danger.yml

@@ -14,14 +14,14 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4

       - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
         with:
           version: 9

       - name: Setup Node
-        uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
+        uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
         with:
           node-version: "20"
           cache: "pnpm"
10  .github/workflows/deploy_cloudflare.yml

@@ -12,7 +12,7 @@ jobs:

     steps:
       - name: Checkout repo
-        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
         with:
           clean: false

@@ -36,28 +36,28 @@ jobs:
           mdbook build ./docs --dest-dir=../target/deploy/docs/

       - name: Deploy Docs
-        uses: cloudflare/wrangler-action@9681c2997648301493e78cacbfb790a9f19c833f # v3
+        uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3
         with:
           apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
           command: pages deploy target/deploy --project-name=docs

       - name: Deploy Install
-        uses: cloudflare/wrangler-action@9681c2997648301493e78cacbfb790a9f19c833f # v3
+        uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3
         with:
           apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
           command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh

       - name: Deploy Docs Workers
-        uses: cloudflare/wrangler-action@9681c2997648301493e78cacbfb790a9f19c833f # v3
+        uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3
         with:
           apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
           command: deploy .cloudflare/docs-proxy/src/worker.js

       - name: Deploy Install Workers
-        uses: cloudflare/wrangler-action@9681c2997648301493e78cacbfb790a9f19c833f # v3
+        uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3
         with:
           apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
38  .github/workflows/deploy_collab.yml

@@ -8,6 +8,7 @@ on:

 env:
   DOCKER_BUILDKIT: 1
   DIGITALOCEAN_ACCESS_TOKEN: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}

 jobs:
   style:
@@ -17,7 +18,7 @@ jobs:
       - test
     steps:
       - name: Checkout repo
-        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
         with:
           clean: false
           fetch-depth: 0
@@ -36,7 +37,7 @@ jobs:
     needs: style
     steps:
       - name: Checkout repo
-        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
         with:
           clean: false
           fetch-depth: 0
@@ -60,27 +61,25 @@ jobs:
       - style
       - tests
     runs-on:
       - buildjet-16vcpu-ubuntu-2204
       - self-hosted
       - deploy
     steps:
       - name: Install doctl
         uses: digitalocean/action-doctl@v2
         with:
           token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
       - name: Add Rust to the PATH
         run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH

       - name: Sign into DigitalOcean docker registry
         run: doctl registry login

       - name: Checkout repo
-        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
         with:
           clean: false

       - name: Set up default .cargo/config.toml
         run: cp ./.cargo/collab-config.toml ./.cargo/config.toml

       - name: Build docker image
-        run: |
-          docker build -f Dockerfile-collab \
-            --build-arg GITHUB_SHA=$GITHUB_SHA \
-            --tag registry.digitalocean.com/zed/collab:$GITHUB_SHA \
-            .
+        run: docker build . --build-arg GITHUB_SHA=$GITHUB_SHA --tag registry.digitalocean.com/zed/collab:$GITHUB_SHA

       - name: Publish docker image
         run: docker push registry.digitalocean.com/zed/collab:${GITHUB_SHA}
@@ -93,19 +92,10 @@ jobs:
     needs:
       - publish
     runs-on:
       - buildjet-16vcpu-ubuntu-2204
       - self-hosted
       - deploy

     steps:
       - name: Checkout repo
         uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
         with:
           clean: false

       - name: Install doctl
         uses: digitalocean/action-doctl@v2
         with:
           token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}

       - name: Sign into Kubernetes
         run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }}
13  .github/workflows/docs.yml

@@ -14,20 +14,11 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4

       - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
         with:
           version: 9

       - name: Prettier Check on /docs
       - run: pnpm dlx prettier . --check
         working-directory: ./docs
         run: |
           pnpm dlx prettier . --check || {
             echo "To fix, run from the root of the zed repo:"
             echo "  cd docs && pnpm dlx prettier . --write && cd .."
             false
           }

       - name: Check spelling
         run: script/check-spelling docs/
3  .github/workflows/publish_extension_cli.yml

@@ -16,7 +16,7 @@ jobs:
       - ubuntu-latest
     steps:
       - name: Checkout repo
-        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
         with:
           clean: false

@@ -24,7 +24,6 @@ jobs:
         uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
         with:
           save-if: ${{ github.ref == 'refs/heads/main' }}
-          cache-provider: "github"

       - name: Configure linux
         shell: bash -euxo pipefail {0}
7  .github/workflows/randomized_tests.yml

@@ -19,15 +19,16 @@ jobs:
   tests:
     name: Run randomized tests
     runs-on:
-      - buildjet-16vcpu-ubuntu-2204
+      - self-hosted
+      - randomized-tests
     steps:
       - name: Install Node
-        uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
+        uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
         with:
           node-version: "18"

       - name: Checkout repo
-        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
         with:
           clean: false
2  .github/workflows/release_actions.yml

@@ -1,5 +1,3 @@
-name: Release Actions
-
 on:
   release:
     types: [published]
65
.github/workflows/release_nightly.yml
vendored
@@ -23,7 +23,7 @@ jobs:
|
||||
- test
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 0
|
||||
@@ -44,7 +44,7 @@ jobs:
|
||||
needs: style
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -70,12 +70,12 @@ jobs:
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
|
||||
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
|
||||
with:
|
||||
node-version: "18"
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -100,7 +100,8 @@ jobs:
|
||||
name: Create a Linux *.tar.gz bundle for x86
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2004
|
||||
- self-hosted
|
||||
- deploy
|
||||
needs: tests
|
||||
env:
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
@@ -109,19 +110,13 @@ jobs:
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Add Rust to the PATH
|
||||
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Install Linux dependencies
|
||||
run: ./script/linux && ./script/install-mold 2.34.0
|
||||
|
||||
- name: Limit target directory size
|
||||
run: script/clear-target-dir-if-larger-than 100
|
||||
|
||||
- name: Set release channel to nightly
|
||||
run: |
|
||||
set -euo pipefail
|
||||
@@ -149,12 +144,27 @@ jobs:
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Install Linux dependencies
|
||||
run: ./script/linux
|
||||
- name: "Setup jq"
|
||||
uses: dcarbone/install-jq-action@8867ddb4788346d7c22b72ea2e2ffe4d514c7bcb # v2
|
||||
|
||||
- name: Set up Clang
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
|
||||
echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH
|
||||
|
||||
- uses: rui314/setup-mold@0bf4f07ef9048ec62a45f9dbf2f098afa49695f0 # v1
|
||||
with:
|
||||
mold-version: 2.32.0
|
||||
|
||||
- name: rustup
|
||||
run: |
|
||||
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
|
||||
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Limit target directory size
|
||||
run: script/clear-target-dir-if-larger-than 100
|
||||
@@ -171,28 +181,3 @@ jobs:
|
||||
|
||||
- name: Upload Zed Nightly
|
||||
run: script/upload-nightly linux-targz
|
||||
|
||||
update-nightly-tag:
|
||||
name: Update nightly tag
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- bundle-mac
|
||||
- bundle-linux-x86
|
||||
- bundle-linux-arm
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Update nightly tag
|
||||
run: |
|
||||
if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
|
||||
echo "Nightly tag already points to current commit. Skipping tagging."
|
||||
exit 0
|
||||
fi
|
||||
git config user.name github-actions
|
||||
git config user.email github-actions@github.com
|
||||
git tag -f nightly
|
||||
git push origin nightly --force
|
||||
|
||||
@@ -1,5 +1,3 @@
-name: Update All Top Ranking Issues
-
 on:
   schedule:
     - cron: "0 */12 * * *"
@@ -10,16 +8,11 @@ jobs:
     runs-on: ubuntu-latest
     if: github.repository_owner == 'zed-industries'
     steps:
-      - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
-      - name: Set up uv
-        uses: astral-sh/setup-uv@v3
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5
         with:
-          version: "latest"
-          enable-cache: true
-          cache-dependency-glob: "script/update_top_ranking_issues/pyproject.toml"
-      - name: Install Python 3.13
-        run: uv python install 3.13
-      - name: Install dependencies
-        run: uv sync --project script/update_top_ranking_issues -p 3.13
-      - name: Run script
-        run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393
+          python-version: "3.11"
+          architecture: "x64"
+          cache: "pip"
+      - run: pip install -r script/update_top_ranking_issues/requirements.txt
+      - run: python script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393
@@ -1,5 +1,3 @@
-name: Update Weekly Top Ranking Issues
-
 on:
   schedule:
     - cron: "0 15 * * *"
@@ -10,16 +8,11 @@ jobs:
     runs-on: ubuntu-latest
     if: github.repository_owner == 'zed-industries'
     steps:
-      - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
-      - name: Set up uv
-        uses: astral-sh/setup-uv@v3
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5
         with:
-          version: "latest"
-          enable-cache: true
-          cache-dependency-glob: "script/update_top_ranking_issues/pyproject.toml"
-      - name: Install Python 3.13
-        run: uv python install 3.13
-      - name: Install dependencies
-        run: uv sync --project script/update_top_ranking_issues -p 3.13
-      - name: Run script
-        run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7
+          python-version: "3.11"
+          architecture: "x64"
+          cache: "pip"
+      - run: pip install -r script/update_top_ranking_issues/requirements.txt
+      - run: python script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7
2  .gitignore

@@ -10,7 +10,7 @@
 /crates/collab/seed.json
 /crates/zed/resources/flatpak/flatpak-cargo-sources.json
 /dev.zed.Zed*.json
-/assets/*licenses.*
+/assets/*licenses.md
 **/venv
 .build
 *.wasm
@@ -38,10 +38,6 @@
         }
       }
     },
-    "file_types": {
-      "Dockerfile": ["Dockerfile*[!dockerignore]"],
-      "Git Ignore": ["dockerignore"]
-    },
     "hard_tabs": false,
     "formatter": "auto",
     "remove_trailing_whitespace_on_save": true,
1380  Cargo.lock (generated; diff not shown)
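The 1380-line Cargo.lock churn follows mechanically from the dependency edits in the Cargo.toml changes below. A minimal sketch of how such a lockfile update is typically produced after editing version requirements:

```sh
# Preview how the resolver would rewrite Cargo.lock for the new requirements
cargo update --dry-run

# Actually re-resolve and rewrite Cargo.lock
cargo update
```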
97
Cargo.toml
@@ -27,7 +27,6 @@ members = [
|
||||
"crates/diagnostics",
|
||||
"crates/docs_preprocessor",
|
||||
"crates/editor",
|
||||
"crates/evals",
|
||||
"crates/extension",
|
||||
"crates/extension_api",
|
||||
"crates/extension_cli",
|
||||
@@ -87,7 +86,6 @@ members = [
|
||||
"crates/remote",
|
||||
"crates/remote_server",
|
||||
"crates/repl",
|
||||
"crates/reqwest_client",
|
||||
"crates/rich_text",
|
||||
"crates/rope",
|
||||
"crates/rpc",
|
||||
@@ -99,7 +97,6 @@ members = [
|
||||
"crates/settings_ui",
|
||||
"crates/snippet",
|
||||
"crates/snippet_provider",
|
||||
"crates/snippets_ui",
|
||||
"crates/sqlez",
|
||||
"crates/sqlez_macros",
|
||||
"crates/story",
|
||||
@@ -122,7 +119,6 @@ members = [
|
||||
"crates/ui",
|
||||
"crates/ui_input",
|
||||
"crates/ui_macros",
|
||||
"crates/reqwest_client",
|
||||
"crates/util",
|
||||
"crates/vcs_menu",
|
||||
"crates/vim",
|
||||
@@ -145,6 +141,7 @@ members = [
|
||||
"extensions/elm",
|
||||
"extensions/emmet",
|
||||
"extensions/erlang",
|
||||
"extensions/gleam",
|
||||
"extensions/glsl",
|
||||
"extensions/haskell",
|
||||
"extensions/html",
|
||||
@@ -153,15 +150,16 @@ members = [
|
||||
"extensions/php",
|
||||
"extensions/perplexity",
|
||||
"extensions/prisma",
|
||||
"extensions/proto",
|
||||
"extensions/purescript",
|
||||
"extensions/ruff",
|
||||
"extensions/ruby",
|
||||
"extensions/slash-commands-example",
|
||||
"extensions/snippets",
|
||||
"extensions/svelte",
|
||||
"extensions/terraform",
|
||||
"extensions/test-extension",
|
||||
"extensions/toml",
|
||||
"extensions/uppercase",
|
||||
"extensions/uiua",
|
||||
"extensions/vue",
|
||||
"extensions/zig",
|
||||
@@ -175,7 +173,6 @@ members = [
|
||||
default-members = ["crates/zed"]
|
||||
|
||||
[workspace.dependencies]
|
||||
|
||||
#
|
||||
# Workspace member crates
|
||||
#
|
||||
@@ -219,8 +216,9 @@ git = { path = "crates/git" }
|
||||
git_hosting_providers = { path = "crates/git_hosting_providers" }
|
||||
go_to_line = { path = "crates/go_to_line" }
|
||||
google_ai = { path = "crates/google_ai" }
|
||||
gpui = { path = "crates/gpui", default-features = false, features = ["http_client"]}
|
||||
gpui = { path = "crates/gpui" }
|
||||
gpui_macros = { path = "crates/gpui_macros" }
|
||||
handlebars = "4.3"
|
||||
headless = { path = "crates/headless" }
|
||||
html_to_markdown = { path = "crates/html_to_markdown" }
|
||||
http_client = { path = "crates/http_client" }
|
||||
@@ -264,7 +262,6 @@ release_channel = { path = "crates/release_channel" }
|
||||
remote = { path = "crates/remote" }
|
||||
remote_server = { path = "crates/remote_server" }
|
||||
repl = { path = "crates/repl" }
|
||||
reqwest_client = { path = "crates/reqwest_client" }
|
||||
rich_text = { path = "crates/rich_text" }
|
||||
rope = { path = "crates/rope" }
|
||||
rpc = { path = "crates/rpc" }
|
||||
@@ -276,7 +273,6 @@ settings = { path = "crates/settings" }
|
||||
settings_ui = { path = "crates/settings_ui" }
|
||||
snippet = { path = "crates/snippet" }
|
||||
snippet_provider = { path = "crates/snippet_provider" }
|
||||
snippets_ui = { path = "crates/snippets_ui" }
|
||||
sqlez = { path = "crates/sqlez" }
|
||||
sqlez_macros = { path = "crates/sqlez_macros" }
|
||||
story = { path = "crates/story" }
|
||||
@@ -318,7 +314,6 @@ any_vec = "0.14"
|
||||
anyhow = "1.0.86"
|
||||
arrayvec = { version = "0.7.4", features = ["serde"] }
|
||||
ashpd = "0.9.1"
|
||||
async-compat = "0.2.1"
|
||||
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
|
||||
async-dispatcher = "0.1"
|
||||
async-fs = "1.6"
|
||||
@@ -326,7 +321,7 @@ async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "8
|
||||
async-recursion = "1.0.0"
|
||||
async-tar = "0.5.0"
|
||||
async-trait = "0.1"
|
||||
async-tungstenite = "0.24"
|
||||
async-tungstenite = "0.23"
|
||||
async-watch = "0.3.1"
|
||||
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
|
||||
base64 = "0.22"
|
||||
@@ -335,7 +330,6 @@ blade-graphics = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb
|
||||
blade-macros = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" }
|
||||
blade-util = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" }
|
||||
blake3 = "1.5.3"
|
||||
bytes = "1.0"
|
||||
cargo_metadata = "0.18"
|
||||
cargo_toml = "0.20"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
@@ -358,15 +352,18 @@ futures-batch = "0.6.1"
|
||||
futures-lite = "1.13"
|
||||
git2 = { version = "0.19", default-features = false }
|
||||
globset = "0.4"
|
||||
handlebars = "4.3"
|
||||
heed = { version = "0.20.1", features = ["read-txn-no-tls"] }
|
||||
hex = "0.4.3"
|
||||
html5ever = "0.27.0"
|
||||
hyper = "0.14"
|
||||
html5ever = "0.27.0"
|
||||
ignore = "0.4.22"
|
||||
image = "0.25.1"
|
||||
indexmap = { version = "1.6.2", features = ["serde"] }
|
||||
indoc = "2"
|
||||
# We explicitly disable http2 support in isahc.
|
||||
isahc = { version = "1.7.2", default-features = false, features = [
|
||||
"text-decoding",
|
||||
] }
|
||||
itertools = "0.13.0"
|
||||
jsonwebtoken = "9.3"
|
||||
libc = "0.2"
|
||||
@@ -381,9 +378,9 @@ ordered-float = "2.1.1"
|
||||
palette = { version = "0.7.5", default-features = false, features = ["std"] }
|
||||
parking_lot = "0.12.1"
|
||||
pathdiff = "0.2"
|
||||
profiling = "1"
|
||||
postage = { version = "0.5", features = ["futures-traits"] }
|
||||
pretty_assertions = "1.3.0"
|
||||
profiling = "1"
|
||||
prost = "0.9"
|
||||
prost-build = "0.9"
|
||||
prost-types = "0.9"
|
||||
@@ -391,15 +388,12 @@ pulldown-cmark = { version = "0.12.0", default-features = false }
|
||||
rand = "0.8.5"
|
||||
regex = "1.5"
|
||||
repair_json = "0.1.0"
|
||||
reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29", default-features = false, features = ["charset", "http2", "macos-system-configuration", "rustls-tls-native-roots", "stream"]}
|
||||
rsa = "0.9.6"
|
||||
runtimelib = { version = "0.15", default-features = false, features = [
|
||||
"async-dispatcher-runtime",
|
||||
] }
|
||||
rustc-demangle = "0.1.23"
|
||||
rust-embed = { version = "8.4", features = ["include-exclude"] }
|
||||
rustls = "0.20.3"
|
||||
rustls-native-certs = "0.8.0"
|
||||
schemars = { version = "0.8", features = ["impl_json_schema"] }
|
||||
semver = "1.0"
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
@@ -418,12 +412,11 @@ similar = "1.3"
|
||||
simplelog = "0.12.2"
|
||||
smallvec = { version = "1.6", features = ["union"] }
|
||||
smol = "1.2"
|
||||
sqlformat = "0.2"
|
||||
strsim = "0.11"
|
||||
strum = { version = "0.25.0", features = ["derive"] }
|
||||
subtle = "2.5.0"
|
||||
sys-locale = "0.3.1"
|
||||
sysinfo = "0.31.0"
|
||||
sysinfo = "0.30.7"
|
||||
tempfile = "3.9.0"
|
||||
thiserror = "1.0.29"
|
||||
tiktoken-rs = "0.5.9"
|
||||
@@ -436,52 +429,50 @@ time = { version = "0.3", features = [
|
||||
] }
|
||||
tiny_http = "0.8"
|
||||
toml = "0.8"
|
||||
tokio = { version = "1" }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tower-http = "0.4.4"
|
||||
tree-sitter = { version = "0.23", features = ["wasm"] }
|
||||
tree-sitter-bash = "0.23"
|
||||
tree-sitter-c = "0.23"
|
||||
tree-sitter-cpp = "0.23"
|
||||
tree-sitter-css = "0.23"
|
||||
tree-sitter-elixir = "0.3"
|
||||
tree-sitter-embedded-template = "0.23.0"
|
||||
tree-sitter-go = "0.23"
|
||||
tree-sitter-go-mod = { git = "https://github.com/zed-industries/tree-sitter-go-mod", rev = "a9aea5e358cde4d0f8ff20b7bc4fa311e359c7ca", package = "tree-sitter-gomod" }
|
||||
tree-sitter-gowork = { git = "https://github.com/zed-industries/tree-sitter-go-work", rev = "acb0617bf7f4fda02c6217676cc64acb89536dc7" }
|
||||
tree-sitter-heex = { git = "https://github.com/zed-industries/tree-sitter-heex", rev = "1dd45142fbb05562e35b2040c6129c9bca346592" }
|
||||
tree-sitter-diff = "0.1.0"
|
||||
tree-sitter = { version = "0.22", features = ["wasm"] }
|
||||
tree-sitter-bash = "0.21"
|
||||
tree-sitter-c = "0.21"
|
||||
tree-sitter-cpp = "0.22"
|
||||
tree-sitter-css = "0.21"
|
||||
tree-sitter-elixir = "0.2"
|
||||
tree-sitter-embedded-template = "0.20.0"
|
||||
tree-sitter-go = "0.21"
|
||||
tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "1f55029bacd0a6a11f6eb894c4312d429dcf735c", package = "tree-sitter-gomod" }
|
||||
tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work", rev = "dcbabff454703c3a4bc98a23cf8778d4be46fd22" }
|
||||
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "6dd0303acf7138dd2b9b432a229e16539581c701" }
|
||||
tree-sitter-html = "0.20"
|
||||
tree-sitter-jsdoc = "0.23"
|
||||
tree-sitter-json = "0.23"
|
||||
tree-sitter-md = { git = "https://github.com/zed-industries/tree-sitter-markdown", rev = "4cfa6aad6b75052a5077c80fd934757d9267d81b" }
|
||||
tree-sitter-python = "0.23"
|
||||
tree-sitter-regex = "0.23"
|
||||
tree-sitter-ruby = "0.23"
|
||||
tree-sitter-rust = "0.23"
|
||||
tree-sitter-typescript = "0.23"
|
||||
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" }
|
||||
unicase = "2.6"
|
||||
tree-sitter-jsdoc = "0.21"
|
||||
tree-sitter-json = "0.21"
|
||||
tree-sitter-md = { git = "https://github.com/zed-industries/tree-sitter-markdown", rev = "e3855e37f8f2c71aa7513c18a9c95fb7461b1b10" }
|
||||
protols-tree-sitter-proto = "0.2"
|
||||
tree-sitter-python = "0.21"
|
||||
tree-sitter-regex = "0.21"
|
||||
tree-sitter-ruby = "0.21"
|
||||
tree-sitter-rust = "0.21"
|
||||
tree-sitter-typescript = "0.21"
|
||||
tree-sitter-yaml = "0.6"
|
||||
unindent = "0.1.7"
|
||||
unicase = "2.6"
|
||||
unicode-segmentation = "1.10"
|
||||
url = "2.2"
|
||||
uuid = { version = "1.1.2", features = ["v4", "v5", "serde"] }
|
||||
wasmparser = "0.215"
|
||||
wasm-encoder = "0.215"
|
||||
wasmtime = { version = "24", default-features = false, features = [
|
||||
wasmparser = "0.201"
|
||||
wasm-encoder = "0.201"
|
||||
wasmtime = { version = "21.0.1", default-features = false, features = [
|
||||
"async",
|
||||
"demangle",
|
||||
"runtime",
|
||||
"cranelift",
|
||||
"component-model",
|
||||
] }
|
||||
wasmtime-wasi = "24"
|
||||
wasmtime-wasi = "21.0.1"
|
||||
which = "6.0.0"
|
||||
wit-component = "0.201"
|
||||
zstd = "0.11"
|
||||
|
||||
[workspace.dependencies.async-stripe]
|
||||
git = "https://github.com/zed-industries/async-stripe"
|
||||
rev = "3672dd4efb7181aa597bf580bf5a2f5d23db6735"
|
||||
version = "0.39"
|
||||
default-features = false
|
||||
features = [
|
||||
"runtime-tokio-hyper-rustls",
|
||||
@@ -499,6 +490,7 @@ features = [
|
||||
"implement",
|
||||
"Foundation_Numerics",
|
||||
"Storage",
|
||||
"System",
|
||||
"System_Threading",
|
||||
"UI_ViewManagement",
|
||||
"Wdk_System_SystemServices",
|
||||
@@ -529,10 +521,13 @@ features = [
|
||||
"Win32_UI_Input_Ime",
|
||||
"Win32_UI_Input_KeyboardAndMouse",
|
||||
"Win32_UI_Shell",
|
||||
"Win32_UI_Shell_Common",
|
||||
"Win32_UI_WindowsAndMessaging",
|
||||
]
|
||||
|
||||
[patch.crates-io]
|
||||
# Patch Tree-sitter for updated wasmtime.
|
||||
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "7f4a57817d58a2f134fe863674acad6bbf007228" }
|
||||
|
||||
[profile.dev]
|
||||
split-debuginfo = "unpacked"
|
||||
debug = "limited"
|
||||
|
||||
@@ -1,2 +0,0 @@
-[build]
-dockerfile = "Dockerfile-cross"
@@ -4,19 +4,11 @@ FROM rust:1.81-bookworm as builder
 WORKDIR app
 COPY . .

 # Replace the Cargo configuration with the one used by collab.
 COPY ./.cargo/collab-config.toml ./.cargo/config.toml

 # Compile collab server
 ARG CARGO_PROFILE_RELEASE_PANIC=abort
 ARG GITHUB_SHA

 ENV GITHUB_SHA=$GITHUB_SHA

 # Also add `cmake`, since we need it to build `wasmtime`.
 RUN apt-get update; \
     apt-get install -y --no-install-recommends cmake

 RUN --mount=type=cache,target=./script/node_modules \
     --mount=type=cache,target=/usr/local/cargo/registry \
     --mount=type=cache,target=/usr/local/cargo/git \
@@ -1,17 +0,0 @@
-# syntax=docker/dockerfile:1
-
-ARG CROSS_BASE_IMAGE
-FROM ${CROSS_BASE_IMAGE}
-WORKDIR /app
-ARG TZ=Etc/UTC \
-    LANG=C.UTF-8 \
-    LC_ALL=C.UTF-8 \
-    DEBIAN_FRONTEND=noninteractive
-ENV CARGO_TERM_COLOR=always
-
-COPY script/install-mold script/
-RUN ./script/install-mold "2.34.0"
-COPY script/remote-server script/
-RUN ./script/remote-server
-
-COPY . .
@@ -1,16 +0,0 @@
-.git
-.github
-**/.gitignore
-**/.gitkeep
-.gitattributes
-.mailmap
-**/target
-zed.xcworkspace
-.DS_Store
-compose.yml
-plugins/bin
-script/node_modules
-styles/node_modules
-crates/collab/static/styles.css
-vendor/bin
-assets/themes/
@@ -1,26 +0,0 @@
-# syntax=docker/dockerfile:1
-
-ARG BASE_IMAGE
-FROM ${BASE_IMAGE}
-WORKDIR /app
-ARG TZ=Etc/UTC \
-    LANG=C.UTF-8 \
-    LC_ALL=C.UTF-8 \
-    DEBIAN_FRONTEND=noninteractive
-ENV CARGO_TERM_COLOR=always
-
-COPY script/linux script/
-RUN ./script/linux
-COPY script/install-mold script/install-cmake script/
-RUN ./script/install-mold "2.34.0"
-RUN ./script/install-cmake "3.30.4"
-
-COPY . .
-
-# When debugging, make these into individual RUN statements.
-# Cleanup to avoid saving big layers we aren't going to use.
-RUN . "$HOME/.cargo/env" \
-    && cargo fetch \
-    && cargo build \
-    && cargo run -- --help \
-    && cargo clean --quiet
@@ -1,2 +0,0 @@
-**/target
-**/node_modules
@@ -1 +1 @@
-<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-text-cursor"><path d="M17 22h-1a4 4 0 0 1-4-4V6a4 4 0 0 1 4-4h1"/><path d="M7 22h1a4 4 0 0 0 4-4v-1"/><path d="M7 2h1a4 4 0 0 1 4 4v1"/></svg>
+<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-text-select"><path d="M5 3a2 2 0 0 0-2 2"/><path d="M19 3a2 2 0 0 1 2 2"/><path d="M21 19a2 2 0 0 1-2 2"/><path d="M5 21a2 2 0 0 1-2-2"/><path d="M9 3h1"/><path d="M9 21h1"/><path d="M14 3h1"/><path d="M14 21h1"/><path d="M3 9v1"/><path d="M21 9v1"/><path d="M3 14v1"/><path d="M21 14v1"/><line x1="7" x2="15" y1="8" y2="8"/><line x1="7" x2="17" y1="12" y2="12"/><line x1="7" x2="13" y1="16" y2="16"/></svg>

Before: 345 B. After: 610 B.
@@ -1 +1 @@
-<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-trash"><path d="M3 6h18"/><path d="M19 6v14c0 1-1 2-2 2H7c-1 0-2-1-2-2V6"/><path d="M8 6V4c0-1 1-2 2-2h4c1 0 2 1 2 2v2"/></svg>
+<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-text-cursor"><path d="M17 22h-1a4 4 0 0 1-4-4V6a4 4 0 0 1 4-4h1"/><path d="M7 22h1a4 4 0 0 0 4-4v-1"/><path d="M7 2h1a4 4 0 0 1 4 4v1"/></svg>

Before: 330 B. After: 345 B.
@@ -20,7 +20,6 @@
   "bashrc": "terminal",
   "bmp": "image",
   "c": "c",
   "c++": "cpp",
   "cc": "cpp",
   "cjs": "javascript",
   "coffee": "coffeescript",
@@ -28,7 +27,6 @@
   "cpp": "cpp",
   "css": "css",
   "csv": "storage",
   "cxx": "cpp",
   "cts": "typescript",
   "dart": "dart",
   "dat": "storage",
@@ -68,13 +66,11 @@
   "heex": "elixir",
   "heic": "image",
   "heif": "image",
   "hh": "cpp",
   "hpp": "cpp",
   "hrl": "erlang",
   "hs": "haskell",
   "htm": "template",
   "html": "template",
   "hxx": "cpp",
   "ib": "storage",
   "ico": "image",
   "ini": "settings",
@@ -1,6 +0,0 @@
-<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M3 4H8" stroke="black" stroke-width="1.75" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M6 10L11 10" stroke="black" stroke-width="1.75" stroke-linecap="round" stroke-linejoin="round"/>
-<circle cx="4" cy="10" r="1.875" stroke="black" stroke-width="1.75"/>
-<circle cx="10" cy="4" r="1.875" stroke="black" stroke-width="1.75"/>
-</svg>

Before: 450 B.
@@ -1,11 +0,0 @@
-<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M3.6665 14V9.33333" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M3.6665 6.66667V2" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M8 14V8" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M8 5.33333V2" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M12.3335 14V10.6667" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M12.3335 8V2" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M2.3335 9.33333H5.00016" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M6.6665 5.33334H9.33317" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M11 10.6667H13.6667" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
-</svg>

Before: 1.1 KiB.
@@ -1 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-text-select"><path d="M5 3a2 2 0 0 0-2 2"/><path d="M19 3a2 2 0 0 1 2 2"/><path d="M21 19a2 2 0 0 1-2 2"/><path d="M5 21a2 2 0 0 1-2-2"/><path d="M9 3h1"/><path d="M9 21h1"/><path d="M14 3h1"/><path d="M14 21h1"/><path d="M3 9v1"/><path d="M21 9v1"/><path d="M3 14v1"/><path d="M21 14v1"/><line x1="7" x2="15" y1="8" y2="8"/><line x1="7" x2="17" y1="12" y2="12"/><line x1="7" x2="13" y1="16" y2="16"/></svg>

Before: 610 B.
@@ -56,7 +56,6 @@
|
||||
"shift-tab": "editor::TabPrev",
|
||||
"ctrl-k": "editor::CutToEndOfLine",
|
||||
// "ctrl-t": "editor::Transpose",
|
||||
"alt-q": "editor::Rewrap",
|
||||
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd",
|
||||
"shift-delete": "editor::Cut",
|
||||
@@ -166,7 +165,6 @@
|
||||
{
|
||||
"context": "AssistantPanel",
|
||||
"bindings": {
|
||||
"ctrl-k c": "assistant::CopyCode",
|
||||
"ctrl-g": "search::SelectNextMatch",
|
||||
"ctrl-shift-g": "search::SelectPrevMatch",
|
||||
"alt-m": "assistant::ToggleModelSelector",
|
||||
@@ -196,7 +194,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar && in_replace > Editor",
|
||||
"context": "BufferSearchBar && in_replace",
|
||||
"bindings": {
|
||||
"enter": "search::ReplaceNext",
|
||||
"ctrl-enter": "search::ReplaceAll"
|
||||
@@ -310,11 +308,6 @@
|
||||
"ctrl-shift-\\": "editor::MoveToEnclosingBracket",
|
||||
"ctrl-shift-[": "editor::Fold",
|
||||
"ctrl-shift-]": "editor::UnfoldLines",
|
||||
"ctrl-k ctrl-l": "editor::ToggleFold",
|
||||
"ctrl-k ctrl-[": "editor::FoldRecursive",
|
||||
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
|
||||
"ctrl-k ctrl-0": "editor::FoldAll",
|
||||
"ctrl-k ctrl-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
"ctrl-.": "editor::ToggleCodeActions",
|
||||
"alt-ctrl-r": "editor::RevealInFileManager",
|
||||
@@ -525,13 +518,6 @@
|
||||
"alt-enter": "editor::Newline"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "PromptEditor",
|
||||
"bindings": {
|
||||
"ctrl-[": "assistant::CyclePreviousInlineAssist",
|
||||
"ctrl-]": "assistant::CycleNextInlineAssist"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProjectSearchBar && !in_replace",
|
||||
"bindings": {
|
||||
@@ -664,8 +650,7 @@
|
||||
"shift-up": "terminal::ScrollLineUp",
|
||||
"shift-down": "terminal::ScrollLineDown",
|
||||
"shift-home": "terminal::ScrollToTop",
|
||||
"shift-end": "terminal::ScrollToBottom",
|
||||
"ctrl-shift-space": "terminal::ToggleViMode"
|
||||
"shift-end": "terminal::ScrollToBottom"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -51,7 +51,6 @@
|
||||
"shift-tab": "editor::TabPrev",
|
||||
"ctrl-k": "editor::CutToEndOfLine",
|
||||
"ctrl-t": "editor::Transpose",
|
||||
"alt-q": "editor::Rewrap",
|
||||
"cmd-backspace": "editor::DeleteToBeginningOfLine",
|
||||
"cmd-delete": "editor::DeleteToEndOfLine",
|
||||
"alt-backspace": "editor::DeleteToPreviousWordStart",
|
||||
@@ -188,7 +187,6 @@
|
||||
{
|
||||
"context": "AssistantPanel",
|
||||
"bindings": {
|
||||
"cmd-k c": "assistant::CopyCode",
|
||||
"cmd-g": "search::SelectNextMatch",
|
||||
"cmd-shift-g": "search::SelectPrevMatch",
|
||||
"alt-m": "assistant::ToggleModelSelector",
|
||||
@@ -232,7 +230,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar && in_replace > Editor",
|
||||
"context": "BufferSearchBar && in_replace",
|
||||
"bindings": {
|
||||
"enter": "search::ReplaceNext",
|
||||
"cmd-enter": "search::ReplaceAll"
|
||||
@@ -347,11 +345,6 @@
|
||||
"cmd-shift-\\": "editor::MoveToEnclosingBracket",
|
||||
"alt-cmd-[": "editor::Fold",
|
||||
"alt-cmd-]": "editor::UnfoldLines",
|
||||
"cmd-k cmd-l": "editor::ToggleFold",
|
||||
"cmd-k cmd-[": "editor::FoldRecursive",
|
||||
"cmd-k cmd-]": "editor::UnfoldRecursive",
|
||||
"cmd-k cmd-0": "editor::FoldAll",
|
||||
"cmd-k cmd-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
"cmd-.": "editor::ToggleCodeActions",
|
||||
"alt-cmd-r": "editor::RevealInFileManager",
|
||||
@@ -395,7 +388,6 @@
|
||||
// Change the default action on `menu::Confirm` by setting the parameter
|
||||
// "alt-cmd-o": ["projects::OpenRecent", {"create_new_window": true }],
|
||||
"alt-cmd-o": "projects::OpenRecent",
|
||||
"ctrl-cmd-o": "projects::OpenRemote",
|
||||
"alt-cmd-b": "branches::OpenRecent",
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
"cmd-s": "workspace::Save",
|
||||
@@ -441,12 +433,7 @@
|
||||
"cmd-k shift-right": ["workspace::SwapPaneInDirection", "Right"],
|
||||
"cmd-k shift-up": ["workspace::SwapPaneInDirection", "Up"],
|
||||
"cmd-k shift-down": ["workspace::SwapPaneInDirection", "Down"],
|
||||
"cmd-shift-x": "zed::Extensions"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Workspace && !Terminal",
|
||||
"bindings": {
|
||||
"cmd-shift-x": "zed::Extensions",
|
||||
"alt-t": "task::Rerun",
|
||||
"alt-shift-t": "task::Spawn"
|
||||
}
|
||||
@@ -538,13 +525,6 @@
|
||||
"ctrl-enter": "assistant::InlineAssist"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "PromptEditor",
|
||||
"bindings": {
|
||||
"ctrl-[": "assistant::CyclePreviousInlineAssist",
|
||||
"ctrl-]": "assistant::CycleNextInlineAssist"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProjectSearchBar && !in_replace",
|
||||
"bindings": {
|
||||
@@ -679,8 +659,7 @@
|
||||
"cmd-home": "terminal::ScrollToTop",
|
||||
"cmd-end": "terminal::ScrollToBottom",
|
||||
"shift-home": "terminal::ScrollToTop",
|
||||
"shift-end": "terminal::ScrollToBottom",
|
||||
"ctrl-shift-space": "terminal::ToggleViMode"
|
||||
"shift-end": "terminal::ScrollToBottom"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -128,23 +128,13 @@
|
||||
"shift-m": "vim::WindowMiddle",
|
||||
"shift-l": "vim::WindowBottom",
|
||||
// z commands
|
||||
"z enter": ["workspace::SendKeystrokes", "z t ^"],
|
||||
"z -": ["workspace::SendKeystrokes", "z b ^"],
|
||||
"z ^": ["workspace::SendKeystrokes", "shift-h k z b ^"],
|
||||
"z +": ["workspace::SendKeystrokes", "shift-l j z t ^"],
|
||||
"z t": "editor::ScrollCursorTop",
|
||||
"z z": "editor::ScrollCursorCenter",
|
||||
"z .": ["workspace::SendKeystrokes", "z z ^"],
|
||||
"z b": "editor::ScrollCursorBottom",
|
||||
"z a": "editor::ToggleFold",
|
||||
"z A": "editor::ToggleFoldRecursive",
|
||||
"z c": "editor::Fold",
|
||||
"z C": "editor::FoldRecursive",
|
||||
"z o": "editor::UnfoldLines",
|
||||
"z O": "editor::UnfoldRecursive",
|
||||
"z f": "editor::FoldSelectedRanges",
|
||||
"z M": "editor::FoldAll",
|
||||
"z R": "editor::UnfoldAll",
|
||||
"shift-z shift-q": ["pane::CloseActiveItem", { "saveIntent": "skip" }],
|
||||
"shift-z shift-z": ["pane::CloseActiveItem", { "saveIntent": "saveAll" }],
|
||||
// Count support
|
||||
@@ -249,14 +239,11 @@
|
||||
"g shift-u": ["vim::PushOperator", "Uppercase"],
|
||||
"g ~": ["vim::PushOperator", "OppositeCase"],
|
||||
"\"": ["vim::PushOperator", "Register"],
|
||||
"g q": ["vim::PushOperator", "Rewrap"],
|
||||
"g w": ["vim::PushOperator", "Rewrap"],
|
||||
"q": "vim::ToggleRecord",
|
||||
"shift-q": "vim::ReplayLastRecording",
|
||||
"@": ["vim::PushOperator", "ReplayRegister"],
|
||||
"ctrl-pagedown": "pane::ActivateNextItem",
|
||||
"ctrl-pageup": "pane::ActivatePrevItem",
|
||||
"insert": "vim::InsertBefore",
|
||||
// tree-sitter related commands
|
||||
"[ x": "editor::SelectLargerSyntaxNode",
|
||||
"] x": "editor::SelectSmallerSyntaxNode",
|
||||
@@ -303,8 +290,6 @@
|
||||
"g ctrl-x": ["vim::Decrement", { "step": true }],
|
||||
"shift-i": "vim::InsertBefore",
|
||||
"shift-a": "vim::InsertAfter",
|
||||
"g I": "vim::VisualInsertFirstNonWhiteSpace",
|
||||
"g A": "vim::VisualInsertEndOfLine",
|
||||
"shift-j": "vim::JoinLines",
|
||||
"r": ["vim::PushOperator", "Replace"],
|
||||
"ctrl-c": ["vim::SwitchMode", "Normal"],
|
||||
@@ -315,7 +300,6 @@
|
||||
"i": ["vim::PushOperator", { "Object": { "around": false } }],
|
||||
"a": ["vim::PushOperator", { "Object": { "around": true } }],
|
||||
"g c": "vim::ToggleComments",
|
||||
"g q": "vim::Rewrap",
|
||||
"\"": ["vim::PushOperator", "Register"],
|
||||
// tree-sitter related commands
|
||||
"[ x": "editor::SelectLargerSyntaxNode",
|
||||
@@ -339,8 +323,7 @@
|
||||
"ctrl-t": "vim::Indent",
|
||||
"ctrl-d": "vim::Outdent",
|
||||
"ctrl-k": ["vim::PushOperator", { "Digraph": {} }],
|
||||
"ctrl-r": ["vim::PushOperator", "Register"],
|
||||
"insert": "vim::ToggleReplace"
|
||||
"ctrl-r": ["vim::PushOperator", "Register"]
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -359,8 +342,7 @@
|
||||
"ctrl-k": ["vim::PushOperator", { "Digraph": {} }],
|
||||
"backspace": "vim::UndoReplace",
|
||||
"tab": "vim::Tab",
|
||||
"enter": "vim::Enter",
|
||||
"insert": "vim::InsertBefore"
|
||||
"enter": "vim::Enter"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -445,15 +427,6 @@
|
||||
"~": "vim::CurrentLine"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_operator == gq",
|
||||
"bindings": {
|
||||
"g q": "vim::CurrentLine",
|
||||
"q": "vim::CurrentLine",
|
||||
"g w": "vim::CurrentLine",
|
||||
"w": "vim::CurrentLine"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_operator == y",
|
||||
"bindings": {

@@ -47,20 +47,6 @@ And here's the section to rewrite based on that prompt again for reference:
<rewrite_this>
{{{rewrite_section}}}
</rewrite_this>

{{#if diagnostic_errors}}
{{#each diagnostic_errors}}

Below are the diagnostic errors visible to the user. If the user requests problems to be fixed, use this information, but do not try to fix these errors if the user hasn't asked you to.

<diagnostic_error>
<line_number>{{line_number}}</line_number>
<error_message>{{error_message}}</error_message>
<code_content>{{code_content}}</code_content>
</diagnostic_error>
{{/each}}
{{/if}}

{{/if}}

Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {{content_type}} will be preserved.

@@ -1,8 +0,0 @@
A software developer is asking a question about their project. The source files in their project have been indexed into a database of semantic text embeddings.
Your task is to generate a list of 4 diverse search queries that can be run on this embedding database, in order to retrieve a list of code snippets
that are relevant to the developer's question. Redundant search queries will be heavily penalized, so only include another query if it's sufficiently
distinct from previous ones.

Here is the question that's been asked, together with context that the developer has added manually:

{{{context_buffer}}}
@@ -15,11 +15,9 @@
// text editor:
//
// 1. "VSCode"
// 2. "Atom"
// 3. "JetBrains"
// 4. "None"
// 5. "SublimeText"
// 6. "TextMate"
// 2. "JetBrains"
// 3. "SublimeText"
// 4. "Atom"
"base_keymap": "VSCode",
// Features that can be globally enabled or disabled
"features": {
@@ -113,18 +111,6 @@
|
||||
"use_system_path_prompts": true,
|
||||
// Whether the cursor blinks in the editor.
|
||||
"cursor_blink": true,
|
||||
// Cursor shape for the default editor.
|
||||
// 1. A vertical bar
|
||||
// "bar"
|
||||
// 2. A block that surrounds the following character
|
||||
// "block"
|
||||
// 3. An underline / underscore that runs along the following character
|
||||
// "underline"
|
||||
// 4. A box drawn around the following character
|
||||
// "hollow"
|
||||
//
|
||||
// Default: not set, defaults to "bar"
|
||||
"cursor_shape": null,
|
||||
// How to highlight the current line in the editor.
|
||||
//
|
||||
// 1. Don't highlight the current line:
|
||||
@@ -320,10 +306,6 @@
|
||||
"show_parameter_hints": true,
|
||||
// Corresponds to null/None LSP hint type value.
|
||||
"show_other_hints": true,
|
||||
// Whether to show a background for inlay hints.
|
||||
//
|
||||
// If set to `true`, the background will use the `hint.background` color from the current theme.
|
||||
"show_background": false,
|
||||
// Time to wait after editing the buffer, before requesting the hints,
|
||||
// set to 0 to disable debouncing.
|
||||
"edit_debounce_ms": 700,
|
||||
@@ -356,19 +338,9 @@
|
||||
/// Scrollbar-related settings
|
||||
"scrollbar": {
|
||||
/// When to show the scrollbar in the project panel.
|
||||
/// This setting can take four values:
|
||||
///
|
||||
/// 1. null (default): Inherit editor settings
|
||||
/// 2. Show the scrollbar if there's important information or
|
||||
/// follow the system's configured behavior (default):
|
||||
/// "auto"
|
||||
/// 3. Match the system's configured behavior:
|
||||
/// "system"
|
||||
/// 4. Always show the scrollbar:
|
||||
/// "always"
|
||||
/// 5. Never show the scrollbar:
|
||||
/// "never"
|
||||
"show": null
|
||||
/// Default: always
|
||||
"show": "always"
|
||||
}
|
||||
},
|
||||
"outline_panel": {
|
||||
@@ -494,14 +466,7 @@
|
||||
// Position of the close button on the editor tabs.
|
||||
"close_position": "right",
|
||||
// Whether to show the file icon for a tab.
|
||||
"file_icons": false,
|
||||
// What to do after closing the current tab.
|
||||
//
|
||||
// 1. Activate the tab that was open previously (default)
|
||||
// "History"
|
||||
// 2. Activate the neighbour tab (prefers the right one, if present)
|
||||
// "Neighbour"
|
||||
"activate_on_close": "history"
|
||||
"file_icons": false
|
||||
},
|
||||
// Settings related to preview tabs.
|
||||
"preview_tabs": {
|
||||
@@ -515,11 +480,6 @@
|
||||
// Whether a preview tab gets replaced when code navigation is used to navigate away from the tab.
|
||||
"enable_preview_from_code_navigation": false
|
||||
},
|
||||
// Settings related to the file finder.
|
||||
"file_finder": {
|
||||
// Whether to show file icons in the file finder.
|
||||
"file_icons": true
|
||||
},
|
||||
// Whether or not to remove any trailing whitespace from lines of a buffer
|
||||
// before saving it.
|
||||
"remove_trailing_whitespace_on_save": true,
|
||||
@@ -552,16 +512,17 @@
// How to soft-wrap long lines of text.
// Possible values:
//
// 1. Prefer a single line generally, unless an overly long line is encountered.
// 1. Do not soft wrap.
// "soft_wrap": "none",
// "soft_wrap": "prefer_line", // (deprecated, same as "none")
// 2. Soft wrap lines that overflow the editor.
// 2. Prefer a single line generally, unless an overly long line is encountered.
// "soft_wrap": "prefer_line",
// 3. Soft wrap lines that overflow the editor.
// "soft_wrap": "editor_width",
// 3. Soft wrap lines at the preferred line length.
// 4. Soft wrap lines at the preferred line length.
// "soft_wrap": "preferred_line_length",
// 4. Soft wrap lines at the preferred line length or the editor width (whichever is smaller).
// 5. Soft wrap lines at the preferred line length or the editor width (whichever is smaller).
// "soft_wrap": "bounded",
"soft_wrap": "none",
"soft_wrap": "prefer_line",
// The column at which to soft-wrap lines, for buffers where soft-wrap
// is enabled.
"preferred_line_length": 80,
@@ -616,11 +577,13 @@
}
},
// Configuration for how direnv configuration should be loaded. May take 2 values:
// 1. Load direnv configuration using `direnv export json` directly.
// "load_direnv": "direct"
// 2. Load direnv configuration through the shell hook, works for POSIX shells and fish.
// 1. Load direnv configuration through the shell hook, works for POSIX shells and fish.
// "load_direnv": "shell_hook"
"load_direnv": "direct",
// 2. Load direnv configuration using `direnv export json` directly.
// This can help with some shells that otherwise would not detect
// the direnv environment, such as nushell or elvish.
// "load_direnv": "direct"
"load_direnv": "shell_hook",
"inline_completions": {
// A list of globs representing files that inline completions should be disabled for.
"disabled_globs": [".env"]
@@ -686,18 +649,6 @@
|
||||
// 3. Always blink the cursor, ignoring the terminal mode
|
||||
// "blinking": "on",
|
||||
"blinking": "terminal_controlled",
|
||||
// Default cursor shape for the terminal.
|
||||
// 1. A block that surrounds the following character
|
||||
// "block"
|
||||
// 2. A vertical bar
|
||||
// "bar"
|
||||
// 3. An underline / underscore that runs along the following character
|
||||
// "underline"
|
||||
// 4. A box drawn around the following character
|
||||
// "hollow"
|
||||
//
|
||||
// Default: not set, defaults to "block"
|
||||
"cursor_shape": null,
|
||||
// Set whether Alternate Scroll mode (code: ?1007) is active by default.
|
||||
// Alternate Scroll mode converts mouse scroll events into up / down key
|
||||
// presses when in the alternate screen (e.g. when running applications
|
||||
@@ -747,7 +698,7 @@
|
||||
// to the current working directory. We recommend overriding this
|
||||
// in your project's settings, rather than globally.
|
||||
"directories": [".env", "env", ".venv", "venv"],
|
||||
// Can also be `csh`, `fish`, `nushell` and `power_shell`
|
||||
// Can also be `csh`, `fish`, and `nushell`
|
||||
"activate_script": "default"
|
||||
}
|
||||
},
|
||||
@@ -788,7 +739,6 @@
|
||||
// }
|
||||
//
|
||||
"file_types": {
|
||||
"Plain Text": ["txt"],
|
||||
"JSON": ["flake.lock"],
|
||||
"JSONC": [
|
||||
"**/.zed/**/*.json",
|
||||
@@ -796,24 +746,8 @@
|
||||
"**/Zed/**/*.json",
|
||||
"tsconfig.json",
|
||||
"pyrightconfig.json"
|
||||
],
|
||||
"TOML": ["uv.lock"]
|
||||
]
|
||||
},
|
||||
/// By default use a recent system version of node, or install our own.
|
||||
/// You can override this to use a version of node that is not in $PATH with:
|
||||
/// {
|
||||
/// "node": {
|
||||
/// "node_path": "/path/to/node"
|
||||
/// "npm_path": "/path/to/npm" (defaults to node_path/../npm)
|
||||
/// }
|
||||
/// }
|
||||
/// or to ensure Zed always downloads and installs an isolated version of node:
|
||||
/// {
|
||||
/// "node": {
|
||||
/// "ignore_system_version": true,
|
||||
/// }
|
||||
/// NOTE: changing this setting currently requires restarting Zed.
|
||||
"node": {},
|
||||
// The extensions that Zed should automatically install on startup.
|
||||
//
|
||||
// If you don't want any of these extensions, add this field to your settings
|
||||
@@ -824,7 +758,6 @@
|
||||
// Different settings for specific languages.
|
||||
"languages": {
|
||||
"Astro": {
|
||||
"language_servers": ["astro-language-server", "..."],
|
||||
"prettier": {
|
||||
"allowed": true,
|
||||
"plugins": ["prettier-plugin-astro"]
|
||||
@@ -848,13 +781,6 @@
|
||||
"allowed": true
|
||||
}
|
||||
},
|
||||
"Dart": {
|
||||
"tab_size": 2
|
||||
},
|
||||
"Diff": {
|
||||
"remove_trailing_whitespace_on_save": false,
|
||||
"ensure_final_newline_on_save": false
|
||||
},
|
||||
"Elixir": {
|
||||
"language_servers": ["elixir-ls", "!next-ls", "!lexical", "..."]
|
||||
},
|
||||
@@ -1087,7 +1013,7 @@
|
||||
// environment variables.
|
||||
//
|
||||
// Examples:
|
||||
// - "proxy": "socks5h://localhost:10808"
|
||||
// - "proxy": "socks5://localhost:10808"
|
||||
// - "proxy": "http://127.0.0.1:10809"
|
||||
"proxy": null,
|
||||
// Set to configure aliases for the command palette.

@@ -1,7 +0,0 @@
// Server-specific settings
//
// For a full list of overridable settings, and general information on settings,
// see the documentation: https://zed.dev/docs/configuring-zed#settings-files
{
"lsp": {}
}
@@ -5,7 +5,7 @@
//
// To see all of Zed's default settings without changing your
// custom settings, run `zed: open default settings` from the
// command palette (cmd-shift-p / ctrl-shift-p)
// command palette
{
"ui_font_size": 16,
"buffer_font_size": 16,

@@ -1,2 +1 @@
allow-private-module-inception = true
avoid-breaking-exported-api = false

@@ -10,7 +10,7 @@ use gpui::{
|
||||
use language::{
|
||||
LanguageRegistry, LanguageServerBinaryStatus, LanguageServerId, LanguageServerName,
|
||||
};
|
||||
use project::{EnvironmentErrorMessage, LanguageServerProgress, Project, WorktreeId};
|
||||
use project::{LanguageServerProgress, Project};
|
||||
use smallvec::SmallVec;
|
||||
use std::{cmp::Reverse, fmt::Write, sync::Arc, time::Duration};
|
||||
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle};
|
||||
@@ -19,10 +19,7 @@ use workspace::{item::ItemHandle, StatusItemView, Workspace};
|
||||
actions!(activity_indicator, [ShowErrorMessage]);
|
||||
|
||||
pub enum Event {
|
||||
ShowError {
|
||||
lsp_name: LanguageServerName,
|
||||
error: String,
|
||||
},
|
||||
ShowError { lsp_name: Arc<str>, error: String },
|
||||
}
|
||||
|
||||
pub struct ActivityIndicator {
|
||||
@@ -101,7 +98,6 @@ impl ActivityIndicator {
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
buffer.set_capability(language::Capability::ReadOnly, cx);
|
||||
})?;
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
workspace.add_item_to_active_pane(
|
||||
@@ -127,7 +123,7 @@ impl ActivityIndicator {
|
||||
self.statuses.retain(|status| {
|
||||
if let LanguageServerBinaryStatus::Failed { error } = &status.status {
|
||||
cx.emit(Event::ShowError {
|
||||
lsp_name: status.name.clone(),
|
||||
lsp_name: status.name.0.clone(),
|
||||
error: error.clone(),
|
||||
});
|
||||
false
|
||||
@@ -176,31 +172,7 @@ impl ActivityIndicator {
|
||||
.flatten()
|
||||
}
|
||||
|
||||
fn pending_environment_errors<'a>(
|
||||
&'a self,
|
||||
cx: &'a AppContext,
|
||||
) -> impl Iterator<Item = (&'a WorktreeId, &'a EnvironmentErrorMessage)> {
|
||||
self.project.read(cx).shell_environment_errors(cx)
|
||||
}
|
||||
|
||||
fn content_to_render(&mut self, cx: &mut ViewContext<Self>) -> Option<Content> {
|
||||
// Show if any direnv calls failed
|
||||
if let Some((&worktree_id, error)) = self.pending_environment_errors(cx).next() {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::Warning)
|
||||
.size(IconSize::Small)
|
||||
.into_any_element(),
|
||||
),
|
||||
message: error.0.clone(),
|
||||
on_click: Some(Arc::new(move |this, cx| {
|
||||
this.project.update(cx, |project, cx| {
|
||||
project.remove_environment_error(cx, worktree_id);
|
||||
});
|
||||
cx.dispatch_action(Box::new(workspace::OpenLog));
|
||||
})),
|
||||
});
|
||||
}
|
||||
// Show any language server has pending activity.
|
||||
let mut pending_work = self.pending_language_server_work(cx);
|
||||
if let Some(PendingWork {
|
||||
@@ -252,10 +224,10 @@ impl ActivityIndicator {
|
||||
for status in &self.statuses {
|
||||
match status.status {
|
||||
LanguageServerBinaryStatus::CheckingForUpdate => {
|
||||
checking_for_update.push(status.name.clone())
|
||||
checking_for_update.push(status.name.0.as_ref())
|
||||
}
|
||||
LanguageServerBinaryStatus::Downloading => downloading.push(status.name.clone()),
|
||||
LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.clone()),
|
||||
LanguageServerBinaryStatus::Downloading => downloading.push(status.name.0.as_ref()),
|
||||
LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.0.as_ref()),
|
||||
LanguageServerBinaryStatus::None => {}
|
||||
}
|
||||
}
|
||||
@@ -267,24 +239,8 @@ impl ActivityIndicator {
|
||||
.size(IconSize::Small)
|
||||
.into_any_element(),
|
||||
),
|
||||
message: format!(
|
||||
"Downloading {}...",
|
||||
downloading.iter().map(|name| name.0.as_ref()).fold(
|
||||
String::new(),
|
||||
|mut acc, s| {
|
||||
if !acc.is_empty() {
|
||||
acc.push_str(", ");
|
||||
}
|
||||
acc.push_str(s);
|
||||
acc
|
||||
}
|
||||
)
|
||||
),
|
||||
on_click: Some(Arc::new(move |this, cx| {
|
||||
this.statuses
|
||||
.retain(|status| !downloading.contains(&status.name));
|
||||
this.dismiss_error_message(&DismissErrorMessage, cx)
|
||||
})),
|
||||
message: format!("Downloading {}...", downloading.join(", "),),
|
||||
on_click: None,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -297,22 +253,9 @@ impl ActivityIndicator {
|
||||
),
|
||||
message: format!(
|
||||
"Checking for updates to {}...",
|
||||
checking_for_update.iter().map(|name| name.0.as_ref()).fold(
|
||||
String::new(),
|
||||
|mut acc, s| {
|
||||
if !acc.is_empty() {
|
||||
acc.push_str(", ");
|
||||
}
|
||||
acc.push_str(s);
|
||||
acc
|
||||
}
|
||||
),
|
||||
checking_for_update.join(", "),
|
||||
),
|
||||
on_click: Some(Arc::new(move |this, cx| {
|
||||
this.statuses
|
||||
.retain(|status| !checking_for_update.contains(&status.name));
|
||||
this.dismiss_error_message(&DismissErrorMessage, cx)
|
||||
})),
|
||||
on_click: None,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -324,17 +267,8 @@ impl ActivityIndicator {
|
||||
.into_any_element(),
|
||||
),
|
||||
message: format!(
|
||||
"Failed to run {}. Click to show error.",
|
||||
failed
|
||||
.iter()
|
||||
.map(|name| name.0.as_ref())
|
||||
.fold(String::new(), |mut acc, s| {
|
||||
if !acc.is_empty() {
|
||||
acc.push_str(", ");
|
||||
}
|
||||
acc.push_str(s);
|
||||
acc
|
||||
}),
|
||||
"Failed to download {}. Click to show error.",
|
||||
failed.join(", "),
|
||||
),
|
||||
on_click: Some(Arc::new(|this, cx| {
|
||||
this.show_error_message(&Default::default(), cx)
|
||||
@@ -343,7 +277,7 @@ impl ActivityIndicator {
|
||||
}
|
||||
|
||||
// Show any formatting failure
|
||||
if let Some(failure) = self.project.read(cx).last_formatting_failure(cx) {
|
||||
if let Some(failure) = self.project.read(cx).last_formatting_failure() {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::Warning)
|
||||
@@ -367,9 +301,7 @@ impl ActivityIndicator {
|
||||
.into_any_element(),
|
||||
),
|
||||
message: "Checking for Zed updates…".to_string(),
|
||||
on_click: Some(Arc::new(|this, cx| {
|
||||
this.dismiss_error_message(&DismissErrorMessage, cx)
|
||||
})),
|
||||
on_click: None,
|
||||
}),
|
||||
AutoUpdateStatus::Downloading => Some(Content {
|
||||
icon: Some(
|
||||
@@ -378,9 +310,7 @@ impl ActivityIndicator {
|
||||
.into_any_element(),
|
||||
),
|
||||
message: "Downloading Zed update…".to_string(),
|
||||
on_click: Some(Arc::new(|this, cx| {
|
||||
this.dismiss_error_message(&DismissErrorMessage, cx)
|
||||
})),
|
||||
on_click: None,
|
||||
}),
|
||||
AutoUpdateStatus::Installing => Some(Content {
|
||||
icon: Some(
|
||||
@@ -389,9 +319,7 @@ impl ActivityIndicator {
|
||||
.into_any_element(),
|
||||
),
|
||||
message: "Installing Zed update…".to_string(),
|
||||
on_click: Some(Arc::new(|this, cx| {
|
||||
this.dismiss_error_message(&DismissErrorMessage, cx)
|
||||
})),
|
||||
on_click: None,
|
||||
}),
|
||||
AutoUpdateStatus::Updated { binary_path } => Some(Content {
|
||||
icon: None,
|
||||
@@ -411,7 +339,7 @@ impl ActivityIndicator {
|
||||
),
|
||||
message: "Auto update failed".to_string(),
|
||||
on_click: Some(Arc::new(|this, cx| {
|
||||
this.dismiss_error_message(&DismissErrorMessage, cx)
|
||||
this.dismiss_error_message(&Default::default(), cx)
|
||||
})),
|
||||
}),
|
||||
AutoUpdateStatus::Idle => None,
|
||||
@@ -429,9 +357,7 @@ impl ActivityIndicator {
|
||||
.into_any_element(),
|
||||
),
|
||||
message: format!("Updating {extension_id} extension…"),
|
||||
on_click: Some(Arc::new(|this, cx| {
|
||||
this.dismiss_error_message(&DismissErrorMessage, cx)
|
||||
})),
|
||||
on_click: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,9 +20,13 @@ anyhow.workspace = true
|
||||
chrono.workspace = true
|
||||
futures.workspace = true
|
||||
http_client.workspace = true
|
||||
isahc.workspace = true
|
||||
schemars = { workspace = true, optional = true }
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
strum.workspace = true
|
||||
thiserror.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
tokio.workspace = true
|
||||
|
||||
@@ -6,8 +6,9 @@ use std::{pin::Pin, str::FromStr};
use anyhow::{anyhow, Context, Result};
use chrono::{DateTime, Utc};
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
use http_client::http::{HeaderMap, HeaderValue};
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use isahc::config::Configurable;
use isahc::http::{HeaderMap, HeaderValue};
use serde::{Deserialize, Serialize};
use strum::{EnumIter, EnumString};
use thiserror::Error;
@@ -29,13 +30,13 @@ pub struct AnthropicModelCacheConfiguration {
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
|
||||
pub enum Model {
|
||||
#[default]
|
||||
#[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
|
||||
#[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-20240620")]
|
||||
Claude3_5Sonnet,
|
||||
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
|
||||
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-20240229")]
|
||||
Claude3Opus,
|
||||
#[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-latest")]
|
||||
#[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-20240229")]
|
||||
Claude3Sonnet,
|
||||
#[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-latest")]
|
||||
#[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-20240307")]
|
||||
Claude3Haiku,
|
||||
#[serde(rename = "custom")]
|
||||
Custom {
|
||||
@@ -48,7 +49,6 @@ pub enum Model {
|
||||
/// Indicates whether this custom model supports caching.
|
||||
cache_configuration: Option<AnthropicModelCacheConfiguration>,
|
||||
max_output_tokens: Option<u32>,
|
||||
default_temperature: Option<f32>,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -69,10 +69,10 @@ impl Model {
|
||||
|
||||
pub fn id(&self) -> &str {
|
||||
match self {
|
||||
Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Model::Claude3Opus => "claude-3-opus-latest",
|
||||
Model::Claude3Sonnet => "claude-3-sonnet-latest",
|
||||
Model::Claude3Haiku => "claude-3-haiku-latest",
|
||||
Model::Claude3_5Sonnet => "claude-3-5-sonnet-20240620",
|
||||
Model::Claude3Opus => "claude-3-opus-20240229",
|
||||
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
|
||||
Model::Claude3Haiku => "claude-3-haiku-20240307",
|
||||
Self::Custom { name, .. } => name,
|
||||
}
|
||||
}
|
||||
@@ -124,19 +124,6 @@ impl Model {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn default_temperature(&self) -> f32 {
|
||||
match self {
|
||||
Self::Claude3_5Sonnet
|
||||
| Self::Claude3Opus
|
||||
| Self::Claude3Sonnet
|
||||
| Self::Claude3Haiku => 1.0,
|
||||
Self::Custom {
|
||||
default_temperature,
|
||||
..
|
||||
} => default_temperature.unwrap_or(1.0),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tool_model_id(&self) -> &str {
|
||||
if let Self::Custom {
|
||||
tool_override: Some(tool_override),
|
||||
@@ -288,7 +275,7 @@ pub async fn stream_completion_with_rate_limit_info(
|
||||
.header("X-Api-Key", api_key)
|
||||
.header("Content-Type", "application/json");
|
||||
if let Some(low_speed_timeout) = low_speed_timeout {
|
||||
request_builder = request_builder.read_timeout(low_speed_timeout);
|
||||
request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
|
||||
}
|
||||
let serialized_request =
|
||||
serde_json::to_string(&request).context("failed to serialize request")?;
|
||||
@@ -521,10 +508,6 @@ pub struct Usage {
|
||||
pub input_tokens: Option<u32>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub output_tokens: Option<u32>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub cache_creation_input_tokens: Option<u32>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub cache_read_input_tokens: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
|
||||
@@ -51,7 +51,6 @@ indoc.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
log.workspace = true
|
||||
lsp.workspace = true
|
||||
markdown.workspace = true
|
||||
menu.workspace = true
|
||||
multi_buffer.workspace = true
|
||||
@@ -66,7 +65,6 @@ proto.workspace = true
|
||||
regex.workspace = true
|
||||
release_channel.workspace = true
|
||||
rope.workspace = true
|
||||
rpc.workspace = true
|
||||
schemars.workspace = true
|
||||
search.workspace = true
|
||||
semantic_index.workspace = true
|
||||
@@ -95,11 +93,9 @@ editor = { workspace = true, features = ["test-support"] }
|
||||
env_logger.workspace = true
|
||||
language = { workspace = true, features = ["test-support"] }
|
||||
language_model = { workspace = true, features = ["test-support"] }
|
||||
languages = { workspace = true, features = ["test-support"] }
|
||||
log.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
rand.workspace = true
|
||||
serde_json_lenient.workspace = true
|
||||
text = { workspace = true, features = ["test-support"] }
|
||||
tree-sitter-md.workspace = true
|
||||
unindent.workspace = true
|
||||
|
||||
@@ -41,10 +41,9 @@ use semantic_index::{CloudEmbeddingProvider, SemanticDb};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{update_settings_file, Settings, SettingsStore};
|
||||
use slash_command::{
|
||||
auto_command, cargo_workspace_command, context_server_command, default_command, delta_command,
|
||||
diagnostics_command, docs_command, fetch_command, file_command, now_command, project_command,
|
||||
prompt_command, search_command, symbols_command, tab_command, terminal_command,
|
||||
workflow_command,
|
||||
auto_command, context_server_command, default_command, diagnostics_command, docs_command,
|
||||
fetch_command, file_command, now_command, project_command, prompt_command, search_command,
|
||||
symbols_command, tab_command, terminal_command, workflow_command,
|
||||
};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
@@ -59,7 +58,6 @@ actions!(
|
||||
[
|
||||
Assist,
|
||||
Split,
|
||||
CopyCode,
|
||||
CycleMessageRole,
|
||||
QuoteSelection,
|
||||
InsertIntoEditor,
|
||||
@@ -70,8 +68,6 @@ actions!(
|
||||
ConfirmCommand,
|
||||
NewContext,
|
||||
ToggleModelSelector,
|
||||
CycleNextInlineAssist,
|
||||
CyclePreviousInlineAssist
|
||||
]
|
||||
);
|
||||
|
||||
@@ -362,19 +358,8 @@ fn update_active_language_model_from_settings(cx: &mut AppContext) {
|
||||
let settings = AssistantSettings::get_global(cx);
|
||||
let provider_name = LanguageModelProviderId::from(settings.default_model.provider.clone());
|
||||
let model_id = LanguageModelId::from(settings.default_model.model.clone());
|
||||
let inline_alternatives = settings
|
||||
.inline_alternatives
|
||||
.iter()
|
||||
.map(|alternative| {
|
||||
(
|
||||
LanguageModelProviderId::from(alternative.provider.clone()),
|
||||
LanguageModelId::from(alternative.model.clone()),
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry.select_active_model(&provider_name, &model_id, cx);
|
||||
registry.select_inline_alternative_models(inline_alternatives, cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -382,36 +367,22 @@ fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut
|
||||
let slash_command_registry = SlashCommandRegistry::global(cx);
|
||||
|
||||
slash_command_registry.register_command(file_command::FileSlashCommand, true);
|
||||
slash_command_registry.register_command(delta_command::DeltaSlashCommand, true);
|
||||
slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true);
|
||||
slash_command_registry.register_command(tab_command::TabSlashCommand, true);
|
||||
slash_command_registry
|
||||
.register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true);
|
||||
slash_command_registry.register_command(project_command::ProjectSlashCommand, true);
|
||||
slash_command_registry.register_command(prompt_command::PromptSlashCommand, true);
|
||||
slash_command_registry.register_command(default_command::DefaultSlashCommand, false);
|
||||
slash_command_registry.register_command(terminal_command::TerminalSlashCommand, true);
|
||||
slash_command_registry.register_command(now_command::NowSlashCommand, false);
|
||||
slash_command_registry.register_command(diagnostics_command::DiagnosticsSlashCommand, true);
|
||||
slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);
|
||||
|
||||
if let Some(prompt_builder) = prompt_builder {
|
||||
slash_command_registry.register_command(
|
||||
workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()),
|
||||
true,
|
||||
);
|
||||
cx.observe_flag::<project_command::ProjectSlashCommandFeatureFlag, _>({
|
||||
let slash_command_registry = slash_command_registry.clone();
|
||||
move |is_enabled, _cx| {
|
||||
if is_enabled {
|
||||
slash_command_registry.register_command(
|
||||
project_command::ProjectSlashCommand::new(prompt_builder.clone()),
|
||||
true,
|
||||
);
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);
|
||||
|
||||
cx.observe_flag::<auto_command::AutoSlashCommandFeatureFlag, _>({
|
||||
let slash_command_registry = slash_command_registry.clone();
|
||||
@@ -449,12 +420,10 @@ fn update_slash_commands_from_settings(cx: &mut AppContext) {
|
||||
slash_command_registry.unregister_command(docs_command::DocsSlashCommand);
|
||||
}
|
||||
|
||||
if settings.cargo_workspace.enabled {
|
||||
slash_command_registry
|
||||
.register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true);
|
||||
if settings.project.enabled {
|
||||
slash_command_registry.register_command(project_command::ProjectSlashCommand, true);
|
||||
} else {
|
||||
slash_command_registry
|
||||
.unregister_command(cargo_workspace_command::CargoWorkspaceSlashCommand);
|
||||
slash_command_registry.unregister_command(project_command::ProjectSlashCommand);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -12,13 +12,13 @@ use crate::{
|
||||
slash_command_picker,
|
||||
terminal_inline_assistant::TerminalInlineAssistant,
|
||||
Assist, CacheStatus, ConfirmCommand, Content, Context, ContextEvent, ContextId, ContextStore,
|
||||
ContextStoreEvent, CopyCode, CycleMessageRole, DeployHistory, DeployPromptLibrary,
|
||||
InlineAssistId, InlineAssistant, InsertDraggedFiles, InsertIntoEditor, Message, MessageId,
|
||||
MessageMetadata, MessageStatus, ModelPickerDelegate, ModelSelector, NewContext,
|
||||
PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata,
|
||||
SavedContextMetadata, Split, ToggleFocus, ToggleModelSelector, WorkflowStepResolution,
|
||||
ContextStoreEvent, CycleMessageRole, DeployHistory, DeployPromptLibrary, InlineAssistId,
|
||||
InlineAssistant, InsertDraggedFiles, InsertIntoEditor, Message, MessageId, MessageMetadata,
|
||||
MessageStatus, ModelPickerDelegate, ModelSelector, NewContext, PendingSlashCommand,
|
||||
PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata, SavedContextMetadata, Split,
|
||||
ToggleFocus, ToggleModelSelector, WorkflowStepResolution,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::{SlashCommand, SlashCommandOutputSection};
|
||||
use assistant_tool::ToolRegistry;
|
||||
use client::{proto, Client, Status};
|
||||
@@ -45,8 +45,7 @@ use gpui::{
|
||||
};
|
||||
use indexed_docs::IndexedDocsStore;
|
||||
use language::{
|
||||
language_settings::SoftWrap, BufferSnapshot, Capability, LanguageRegistry, LspAdapterDelegate,
|
||||
ToOffset,
|
||||
language_settings::SoftWrap, Capability, LanguageRegistry, LspAdapterDelegate, Point, ToOffset,
|
||||
};
|
||||
use language_model::{
|
||||
provider::cloud::PROVIDER_ID, LanguageModelProvider, LanguageModelProviderId,
|
||||
@@ -55,9 +54,8 @@ use language_model::{
|
||||
use language_model::{LanguageModelImage, LanguageModelToolUse};
|
||||
use multi_buffer::MultiBufferRow;
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use project::lsp_store::LocalLspAdapterDelegate;
|
||||
use project::lsp_store::ProjectLspAdapterDelegate;
|
||||
use project::{Project, Worktree};
|
||||
use rope::Point;
|
||||
use search::{buffer_search::DivRegistrar, BufferSearchBar};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{update_settings_file, Settings};
|
||||
@@ -72,22 +70,20 @@ use std::{
|
||||
time::Duration,
|
||||
};
|
||||
use terminal_view::{terminal_panel::TerminalPanel, TerminalView};
|
||||
use text::SelectionGoal;
|
||||
use ui::TintColor;
|
||||
use ui::{
|
||||
prelude::*,
|
||||
utils::{format_distance_from_now, DateTimeType},
|
||||
Avatar, ButtonLike, ContextMenu, Disclosure, ElevationIndex, KeyBinding, ListItem,
|
||||
Avatar, AvatarShape, ButtonLike, ContextMenu, Disclosure, ElevationIndex, KeyBinding, ListItem,
|
||||
ListItemSpacing, PopoverMenu, PopoverMenuHandle, Tooltip,
|
||||
};
|
||||
use util::{maybe, ResultExt};
|
||||
use workspace::{
|
||||
dock::{DockPosition, Panel, PanelEvent},
|
||||
item::{self, FollowableItem, Item, ItemHandle},
|
||||
notifications::NotificationId,
|
||||
pane::{self, SaveIntent},
|
||||
searchable::{SearchEvent, SearchableItem},
|
||||
DraggedSelection, Pane, Save, ShowConfiguration, Toast, ToggleZoom, ToolbarItemEvent,
|
||||
DraggedSelection, Pane, Save, ShowConfiguration, ToggleZoom, ToolbarItemEvent,
|
||||
ToolbarItemLocation, ToolbarItemView, Workspace,
|
||||
};
|
||||
use workspace::{searchable::SearchableItemHandle, DraggedTab};
|
||||
@@ -109,7 +105,6 @@ pub fn init(cx: &mut AppContext) {
|
||||
.register_action(AssistantPanel::inline_assist)
|
||||
.register_action(ContextEditor::quote_selection)
|
||||
.register_action(ContextEditor::insert_selection)
|
||||
.register_action(ContextEditor::copy_code)
|
||||
.register_action(ContextEditor::insert_dragged_files)
|
||||
.register_action(AssistantPanel::show_configuration)
|
||||
.register_action(AssistantPanel::create_new_context);
|
||||
@@ -262,7 +257,9 @@ impl PickerDelegate for SavedContextPickerDelegate {
|
||||
.gap_2()
|
||||
.children(if let Some(host_user) = host_user {
|
||||
vec![
|
||||
Avatar::new(host_user.avatar_uri.clone()).into_any_element(),
|
||||
Avatar::new(host_user.avatar_uri.clone())
|
||||
.shape(AvatarShape::Circle)
|
||||
.into_any_element(),
|
||||
Label::new(format!("Shared by @{}", host_user.github_login))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small)
|
||||
@@ -697,9 +694,7 @@ impl AssistantPanel {
|
||||
log::error!("no context found with ID: {}", context_id.to_proto());
|
||||
return;
|
||||
};
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx)
|
||||
.log_err()
|
||||
.flatten();
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx).log_err();
|
||||
|
||||
let assistant_panel = cx.view().downgrade();
|
||||
let editor = cx.new_view(|cx| {
|
||||
@@ -961,8 +956,7 @@ impl AssistantPanel {
|
||||
}
|
||||
|
||||
fn new_context(&mut self, cx: &mut ViewContext<Self>) -> Option<View<ContextEditor>> {
|
||||
let project = self.project.read(cx);
|
||||
if project.is_via_collab() && project.dev_server_project_id().is_none() {
|
||||
if self.project.read(cx).is_via_collab() {
|
||||
let task = self
|
||||
.context_store
|
||||
.update(cx, |store, cx| store.create_remote_context(cx));
|
||||
@@ -973,8 +967,7 @@ impl AssistantPanel {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let workspace = this.workspace.clone();
|
||||
let project = this.project.clone();
|
||||
let lsp_adapter_delegate =
|
||||
make_lsp_adapter_delegate(&project, cx).log_err().flatten();
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err();
|
||||
|
||||
let fs = this.fs.clone();
|
||||
let project = this.project.clone();
|
||||
@@ -1004,9 +997,7 @@ impl AssistantPanel {
|
||||
None
|
||||
} else {
|
||||
let context = self.context_store.update(cx, |store, cx| store.create(cx));
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx)
|
||||
.log_err()
|
||||
.flatten();
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx).log_err();
|
||||
|
||||
let assistant_panel = cx.view().downgrade();
|
||||
let editor = cx.new_view(|cx| {
|
||||
@@ -1212,7 +1203,7 @@ impl AssistantPanel {
|
||||
let project = self.project.clone();
|
||||
let workspace = self.workspace.clone();
|
||||
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten();
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err();
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let context = context.await?;
|
||||
@@ -1259,9 +1250,7 @@ impl AssistantPanel {
|
||||
.update(cx, |store, cx| store.open_remote_context(id, cx));
|
||||
let fs = self.fs.clone();
|
||||
let workspace = self.workspace.clone();
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx)
|
||||
.log_err()
|
||||
.flatten();
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx).log_err();
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let context = context.await?;
|
||||
@@ -1503,13 +1492,6 @@ struct WorkflowAssist {
|
||||
|
||||
type MessageHeader = MessageMetadata;
|
||||
|
||||
#[derive(Clone)]
|
||||
enum AssistError {
|
||||
PaymentRequired,
|
||||
MaxMonthlySpendReached,
|
||||
Message(SharedString),
|
||||
}
|
||||
|
||||
pub struct ContextEditor {
|
||||
context: Model<Context>,
|
||||
fs: Arc<dyn Fs>,
|
||||
@@ -1528,7 +1510,7 @@ pub struct ContextEditor {
|
||||
workflow_steps: HashMap<Range<language::Anchor>, WorkflowStepViewState>,
|
||||
active_workflow_step: Option<ActiveWorkflowStep>,
|
||||
assistant_panel: WeakView<AssistantPanel>,
|
||||
last_error: Option<AssistError>,
|
||||
error_message: Option<SharedString>,
|
||||
show_accept_terms: bool,
|
||||
pub(crate) slash_menu_handle:
|
||||
PopoverMenuHandle<Picker<slash_command_picker::SlashCommandDelegate>>,
|
||||
@@ -1567,7 +1549,7 @@ impl ContextEditor {
|
||||
editor.set_show_runnables(false, cx);
|
||||
editor.set_show_wrap_guides(false, cx);
|
||||
editor.set_show_indent_guides(false, cx);
|
||||
editor.set_completion_provider(Some(Box::new(completion_provider)));
|
||||
editor.set_completion_provider(Box::new(completion_provider));
|
||||
editor.set_collaboration_hub(Box::new(project.clone()));
|
||||
editor
|
||||
});
|
||||
@@ -1599,7 +1581,7 @@ impl ContextEditor {
|
||||
workflow_steps: HashMap::default(),
|
||||
active_workflow_step: None,
|
||||
assistant_panel,
|
||||
last_error: None,
|
||||
error_message: None,
|
||||
show_accept_terms: false,
|
||||
slash_menu_handle: Default::default(),
|
||||
dragged_file_worktrees: Vec::new(),
|
||||
@@ -1643,7 +1625,7 @@ impl ContextEditor {
|
||||
}
|
||||
|
||||
if !self.apply_active_workflow_step(cx) {
|
||||
self.last_error = None;
|
||||
self.error_message = None;
|
||||
self.send_to_model(cx);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -1793,7 +1775,7 @@ impl ContextEditor {
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext<Self>) {
|
||||
self.last_error = None;
|
||||
self.error_message = None;
|
||||
|
||||
if self
|
||||
.context
|
||||
@@ -1924,22 +1906,7 @@ impl ContextEditor {
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
|
||||
let context = self.context.read(cx);
|
||||
let sections = context
|
||||
.slash_command_output_sections()
|
||||
.into_iter()
|
||||
.filter(|section| section.is_valid(context.buffer().read(cx)))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
let snapshot = context.buffer().read(cx).snapshot();
|
||||
let output = command.run(
|
||||
arguments,
|
||||
§ions,
|
||||
snapshot,
|
||||
workspace,
|
||||
self.lsp_adapter_delegate.clone(),
|
||||
cx,
|
||||
);
|
||||
let output = command.run(arguments, workspace, self.lsp_adapter_delegate.clone(), cx);
|
||||
self.context.update(cx, |context, cx| {
|
||||
context.insert_command_output(
|
||||
command_range,
|
||||
@@ -2298,13 +2265,7 @@ impl ContextEditor {
|
||||
}
|
||||
ContextEvent::Operation(_) => {}
|
||||
ContextEvent::ShowAssistError(error_message) => {
|
||||
self.last_error = Some(AssistError::Message(error_message.clone()));
|
||||
}
|
||||
ContextEvent::ShowPaymentRequiredError => {
|
||||
self.last_error = Some(AssistError::PaymentRequired);
|
||||
}
|
||||
ContextEvent::ShowMaxMonthlySpendReachedError => {
|
||||
self.last_error = Some(AssistError::MaxMonthlySpendReached);
|
||||
self.error_message = Some(error_message.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2834,8 +2795,9 @@ impl ContextEditor {
|
||||
} else {
|
||||
// If there are multiple buffers or suggestion groups, create a multibuffer
|
||||
let multibuffer = cx.new_model(|cx| {
|
||||
let mut multibuffer =
|
||||
MultiBuffer::new(Capability::ReadWrite).with_title(resolved_step.title.clone());
|
||||
let replica_id = project.read(cx).replica_id();
|
||||
let mut multibuffer = MultiBuffer::new(replica_id, Capability::ReadWrite)
|
||||
.with_title(resolved_step.title.clone());
|
||||
for (buffer, groups) in &resolved_step.suggestion_groups {
|
||||
let excerpt_ids = multibuffer.push_excerpts(
|
||||
buffer.clone(),
|
||||
@@ -3123,49 +3085,6 @@ impl ContextEditor {
|
||||
});
|
||||
}
|
||||
|
||||
/// Returns either the selected text, or the content of the Markdown code
|
||||
/// block surrounding the cursor.
|
||||
fn get_selection_or_code_block(
|
||||
context_editor_view: &View<ContextEditor>,
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) -> Option<(String, bool)> {
|
||||
const CODE_FENCE_DELIMITER: &'static str = "```";
|
||||
|
||||
let context_editor = context_editor_view.read(cx).editor.read(cx);
|
||||
|
||||
if context_editor.selections.newest::<Point>(cx).is_empty() {
|
||||
let snapshot = context_editor.buffer().read(cx).snapshot(cx);
|
||||
let (_, _, snapshot) = snapshot.as_singleton()?;
|
||||
|
||||
let head = context_editor.selections.newest::<Point>(cx).head();
|
||||
let offset = snapshot.point_to_offset(head);
|
||||
|
||||
let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?;
|
||||
let mut text = snapshot
|
||||
.text_for_range(surrounding_code_block_range)
|
||||
.collect::<String>();
|
||||
|
||||
// If there is no newline trailing the closing three-backticks, then
|
||||
// tree-sitter-md extends the range of the content node to include
|
||||
// the backticks.
|
||||
if text.ends_with(CODE_FENCE_DELIMITER) {
|
||||
text.drain((text.len() - CODE_FENCE_DELIMITER.len())..);
|
||||
}
|
||||
|
||||
(!text.is_empty()).then_some((text, true))
|
||||
} else {
|
||||
let anchor = context_editor.selections.newest_anchor();
|
||||
let text = context_editor
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.read(cx)
|
||||
.text_for_range(anchor.range())
|
||||
.collect::<String>();
|
||||
|
||||
(!text.is_empty()).then_some((text, false))
|
||||
}
|
||||
}
|
||||
|
||||
fn insert_selection(
|
||||
workspace: &mut Workspace,
|
||||
_: &InsertIntoEditor,
|
||||
@@ -3184,7 +3103,17 @@ impl ContextEditor {
|
||||
return;
|
||||
};
|
||||
|
||||
if let Some((text, _)) = Self::get_selection_or_code_block(&context_editor_view, cx) {
|
||||
let context_editor = context_editor_view.read(cx).editor.read(cx);
|
||||
let anchor = context_editor.selections.newest_anchor();
|
||||
let text = context_editor
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.read(cx)
|
||||
.text_for_range(anchor.range())
|
||||
.collect::<String>();
|
||||
|
||||
// If nothing is selected, don't delete the current selection; instead, be a no-op.
|
||||
if !text.is_empty() {
|
||||
active_editor_view.update(cx, |editor, cx| {
|
||||
editor.insert(&text, cx);
|
||||
editor.focus(cx);
|
||||
@@ -3192,36 +3121,6 @@ impl ContextEditor {
|
||||
}
|
||||
}
|
||||
|
||||
fn copy_code(workspace: &mut Workspace, _: &CopyCode, cx: &mut ViewContext<Workspace>) {
|
||||
let result = maybe!({
|
||||
let panel = workspace.panel::<AssistantPanel>(cx)?;
|
||||
let context_editor_view = panel.read(cx).active_context_editor(cx)?;
|
||||
Self::get_selection_or_code_block(&context_editor_view, cx)
|
||||
});
|
||||
let Some((text, is_code_block)) = result else {
|
||||
return;
|
||||
};
|
||||
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(text));
|
||||
|
||||
struct CopyToClipboardToast;
|
||||
workspace.show_toast(
|
||||
Toast::new(
|
||||
NotificationId::unique::<CopyToClipboardToast>(),
|
||||
format!(
|
||||
"{} copied to clipboard.",
|
||||
if is_code_block {
|
||||
"Code block"
|
||||
} else {
|
||||
"Selection"
|
||||
}
|
||||
),
|
||||
)
|
||||
.autohide(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
fn insert_dragged_files(
|
||||
workspace: &mut Workspace,
|
||||
action: &InsertDraggedFiles,
|
||||
@@ -3368,7 +3267,7 @@ impl ContextEditor {
|
||||
|
||||
let fence = codeblock_fence_for_path(
|
||||
filename.as_deref(),
|
||||
Some(selection.start.row..=selection.end.row),
|
||||
Some(selection.start.row..selection.end.row),
|
||||
);
|
||||
|
||||
if let Some((line_comment_prefix, outline_text)) =
|
||||
@@ -3457,7 +3356,7 @@ impl ContextEditor {
|
||||
|
||||
fn copy(&mut self, _: &editor::actions::Copy, cx: &mut ViewContext<Self>) {
|
||||
if self.editor.read(cx).selections.count() == 1 {
|
||||
let (copied_text, metadata, _) = self.get_clipboard_contents(cx);
|
||||
let (copied_text, metadata) = self.get_clipboard_contents(cx);
|
||||
cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata(
|
||||
copied_text,
|
||||
metadata,
|
||||
@@ -3471,9 +3370,11 @@ impl ContextEditor {
|
||||
|
||||
fn cut(&mut self, _: &editor::actions::Cut, cx: &mut ViewContext<Self>) {
|
||||
if self.editor.read(cx).selections.count() == 1 {
|
||||
let (copied_text, metadata, selections) = self.get_clipboard_contents(cx);
|
||||
let (copied_text, metadata) = self.get_clipboard_contents(cx);
|
||||
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
let selections = editor.selections.all::<Point>(cx);
|
||||
|
||||
editor.transact(cx, |this, cx| {
|
||||
this.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select(selections);
|
||||
@@ -3493,71 +3394,52 @@ impl ContextEditor {
|
||||
cx.propagate();
|
||||
}
|
||||
|
||||
fn get_clipboard_contents(
|
||||
&mut self,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> (String, CopyMetadata, Vec<text::Selection<usize>>) {
|
||||
let (snapshot, selection, creases) = self.editor.update(cx, |editor, cx| {
|
||||
let mut selection = editor.selections.newest::<Point>(cx);
|
||||
fn get_clipboard_contents(&mut self, cx: &mut ViewContext<Self>) -> (String, CopyMetadata) {
|
||||
let creases = self.editor.update(cx, |editor, cx| {
|
||||
let selection = editor.selections.newest::<Point>(cx);
|
||||
let selection_start = editor.selections.newest::<usize>(cx).start;
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
editor.display_map.update(cx, |display_map, cx| {
|
||||
display_map
|
||||
.snapshot(cx)
|
||||
.crease_snapshot
|
||||
.creases_in_range(
|
||||
MultiBufferRow(selection.start.row)..MultiBufferRow(selection.end.row + 1),
|
||||
&snapshot,
|
||||
)
|
||||
.filter_map(|crease| {
|
||||
if let Some(metadata) = &crease.metadata {
|
||||
let start = crease
|
||||
.range
|
||||
.start
|
||||
.to_offset(&snapshot)
|
||||
.saturating_sub(selection_start);
|
||||
let end = crease
|
||||
.range
|
||||
.end
|
||||
.to_offset(&snapshot)
|
||||
.saturating_sub(selection_start);
|
||||
|
||||
let is_entire_line = selection.is_empty() || editor.selections.line_mode;
|
||||
if is_entire_line {
|
||||
selection.start = Point::new(selection.start.row, 0);
|
||||
selection.end =
|
||||
cmp::min(snapshot.max_point(), Point::new(selection.start.row + 1, 0));
|
||||
selection.goal = SelectionGoal::None;
|
||||
}
|
||||
let range_relative_to_selection = start..end;
|
||||
|
||||
let selection_start = snapshot.point_to_offset(selection.start);
|
||||
|
||||
(
|
||||
snapshot.clone(),
|
||||
selection.clone(),
|
||||
editor.display_map.update(cx, |display_map, cx| {
|
||||
display_map
|
||||
.snapshot(cx)
|
||||
.crease_snapshot
|
||||
.creases_in_range(
|
||||
MultiBufferRow(selection.start.row)
|
||||
..MultiBufferRow(selection.end.row + 1),
|
||||
&snapshot,
|
||||
)
|
||||
.filter_map(|crease| {
|
||||
if let Some(metadata) = &crease.metadata {
|
||||
let start = crease
|
||||
.range
|
||||
.start
|
||||
.to_offset(&snapshot)
|
||||
.saturating_sub(selection_start);
|
||||
let end = crease
|
||||
.range
|
||||
.end
|
||||
.to_offset(&snapshot)
|
||||
.saturating_sub(selection_start);
|
||||
|
||||
let range_relative_to_selection = start..end;
|
||||
|
||||
if range_relative_to_selection.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(SelectedCreaseMetadata {
|
||||
range_relative_to_selection,
|
||||
crease: metadata.clone(),
|
||||
})
|
||||
}
|
||||
} else {
|
||||
if range_relative_to_selection.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(SelectedCreaseMetadata {
|
||||
range_relative_to_selection,
|
||||
crease: metadata.clone(),
|
||||
})
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
}),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
});
|
||||
|
||||
let selection = selection.map(|point| snapshot.point_to_offset(point));
|
||||
let context = self.context.read(cx);
|
||||
|
||||
let selection = self.editor.read(cx).selections.newest::<usize>(cx);
|
||||
let mut text = String::new();
|
||||
for message in context.messages(cx) {
|
||||
if message.offset_range.start >= selection.range().end {
|
||||
@@ -3569,14 +3451,12 @@ impl ContextEditor {
|
||||
for chunk in context.buffer().read(cx).text_for_range(range) {
|
||||
text.push_str(chunk);
|
||||
}
|
||||
if message.offset_range.end < selection.range().end {
|
||||
text.push('\n');
|
||||
}
|
||||
text.push('\n');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
(text, CopyMetadata { creases }, vec![selection])
|
||||
(text, CopyMetadata { creases })
|
||||
}
|
||||
|
||||
fn paste(&mut self, action: &editor::actions::Paste, cx: &mut ViewContext<Self>) {
|
||||
@@ -4237,11 +4117,9 @@ impl ContextEditor {
|
||||
.child(Label::new(label)),
|
||||
)
|
||||
.child(
|
||||
Button::new("open-configuration", "Configure Providers")
|
||||
Button::new("open-configuration", "Open configuration")
|
||||
.size(ButtonSize::Compact)
|
||||
.icon(Some(IconName::SlidersVertical))
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_position(IconPosition::Start)
|
||||
.style(ButtonStyle::Filled)
|
||||
.on_click({
|
||||
let focus_handle = self.focus_handle(cx).clone();
|
||||
@@ -4318,196 +4196,6 @@ impl ContextEditor {
|
||||
focus_handle.dispatch_action(&Assist, cx);
|
||||
})
|
||||
}
|
||||
|
||||
fn render_last_error(&self, cx: &mut ViewContext<Self>) -> Option<AnyElement> {
|
||||
let last_error = self.last_error.as_ref()?;
|
||||
|
||||
Some(
|
||||
div()
|
||||
.absolute()
|
||||
.right_3()
|
||||
.bottom_12()
|
||||
.max_w_96()
|
||||
.py_2()
|
||||
.px_3()
|
||||
.elevation_2(cx)
|
||||
.occlude()
|
||||
.child(match last_error {
|
||||
AssistError::PaymentRequired => self.render_payment_required_error(cx),
|
||||
AssistError::MaxMonthlySpendReached => {
|
||||
self.render_max_monthly_spend_reached_error(cx)
|
||||
}
|
||||
AssistError::Message(error_message) => {
|
||||
self.render_assist_error(error_message, cx)
|
||||
}
|
||||
})
|
||||
.into_any(),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_payment_required_error(&self, cx: &mut ViewContext<Self>) -> AnyElement {
|
||||
const ERROR_MESSAGE: &str = "Free tier exceeded. Subscribe and add payment to continue using Zed LLMs. You'll be billed at cost for tokens used.";
|
||||
const ACCOUNT_URL: &str = "https://zed.dev/account";
|
||||
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.items_center()
|
||||
.child(Icon::new(IconName::XCircle).color(Color::Error))
|
||||
.child(Label::new("Free Usage Exceeded").weight(FontWeight::MEDIUM)),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("error-message")
|
||||
.max_h_24()
|
||||
.overflow_y_scroll()
|
||||
.child(Label::new(ERROR_MESSAGE)),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_end()
|
||||
.mt_1()
|
||||
.child(Button::new("subscribe", "Subscribe").on_click(cx.listener(
|
||||
|this, _, cx| {
|
||||
this.last_error = None;
|
||||
cx.open_url(ACCOUNT_URL);
|
||||
cx.notify();
|
||||
},
|
||||
)))
|
||||
.child(Button::new("dismiss", "Dismiss").on_click(cx.listener(
|
||||
|this, _, cx| {
|
||||
this.last_error = None;
|
||||
cx.notify();
|
||||
},
|
||||
))),
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_max_monthly_spend_reached_error(&self, cx: &mut ViewContext<Self>) -> AnyElement {
|
||||
const ERROR_MESSAGE: &str = "You have reached your maximum monthly spend. Increase your spend limit to continue using Zed LLMs.";
|
||||
const ACCOUNT_URL: &str = "https://zed.dev/account";
|
||||
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.items_center()
|
||||
.child(Icon::new(IconName::XCircle).color(Color::Error))
|
||||
.child(Label::new("Max Monthly Spend Reached").weight(FontWeight::MEDIUM)),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("error-message")
|
||||
.max_h_24()
|
||||
.overflow_y_scroll()
|
||||
.child(Label::new(ERROR_MESSAGE)),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_end()
|
||||
.mt_1()
|
||||
.child(
|
||||
Button::new("subscribe", "Update Monthly Spend Limit").on_click(
|
||||
cx.listener(|this, _, cx| {
|
||||
this.last_error = None;
|
||||
cx.open_url(ACCOUNT_URL);
|
||||
cx.notify();
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(Button::new("dismiss", "Dismiss").on_click(cx.listener(
|
||||
|this, _, cx| {
|
||||
this.last_error = None;
|
||||
cx.notify();
|
||||
},
|
||||
))),
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_assist_error(
|
||||
&self,
|
||||
error_message: &SharedString,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> AnyElement {
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.items_center()
|
||||
.child(Icon::new(IconName::XCircle).color(Color::Error))
|
||||
.child(
|
||||
Label::new("Error interacting with language model")
|
||||
.weight(FontWeight::MEDIUM),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("error-message")
|
||||
.max_h_24()
|
||||
.overflow_y_scroll()
|
||||
.child(Label::new(error_message.clone())),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_end()
|
||||
.mt_1()
|
||||
.child(Button::new("dismiss", "Dismiss").on_click(cx.listener(
|
||||
|this, _, cx| {
|
||||
this.last_error = None;
|
||||
cx.notify();
|
||||
},
|
||||
))),
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the contents of the *outermost* fenced code block that contains the given offset.
fn find_surrounding_code_block(snapshot: &BufferSnapshot, offset: usize) -> Option<Range<usize>> {
const CODE_BLOCK_NODE: &'static str = "fenced_code_block";
const CODE_BLOCK_CONTENT: &'static str = "code_fence_content";

let layer = snapshot.syntax_layers().next()?;

let root_node = layer.node();
let mut cursor = root_node.walk();

// Go to the first child for the given offset
while cursor.goto_first_child_for_byte(offset).is_some() {
// If we're at the end of the node, go to the next one.
// Example: if you have a fenced-code-block, and you're on the start of the line
// right after the closing ```, you want to skip the fenced-code-block and
// go to the next sibling.
if cursor.node().end_byte() == offset {
cursor.goto_next_sibling();
}

if cursor.node().start_byte() > offset {
break;
}

// We found the fenced code block.
if cursor.node().kind() == CODE_BLOCK_NODE {
// Now we need to find the child node that contains the code.
cursor.goto_first_child();
loop {
if cursor.node().kind() == CODE_BLOCK_CONTENT {
return Some(cursor.node().byte_range());
}
if !cursor.goto_next_sibling() {
break;
}
}
}
}

None
}

fn render_fold_icon_button(
|
||||
@@ -4602,7 +4290,48 @@ impl Render for ContextEditor {
|
||||
.child(element),
|
||||
)
|
||||
})
|
||||
.children(self.render_last_error(cx))
|
||||
.when_some(self.error_message.clone(), |this, error_message| {
|
||||
this.child(
|
||||
div()
|
||||
.absolute()
|
||||
.right_3()
|
||||
.bottom_12()
|
||||
.max_w_96()
|
||||
.py_2()
|
||||
.px_3()
|
||||
.elevation_2(cx)
|
||||
.occlude()
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.items_center()
|
||||
.child(Icon::new(IconName::XCircle).color(Color::Error))
|
||||
.child(
|
||||
Label::new("Error interacting with language model")
|
||||
.weight(FontWeight::MEDIUM),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("error-message")
|
||||
.max_h_24()
|
||||
.overflow_y_scroll()
|
||||
.child(Label::new(error_message)),
|
||||
)
|
||||
.child(h_flex().justify_end().mt_1().child(
|
||||
Button::new("dismiss", "Dismiss").on_click(cx.listener(
|
||||
|this, _, cx| {
|
||||
this.error_message = None;
|
||||
cx.notify();
|
||||
},
|
||||
)),
|
||||
)),
|
||||
),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
h_flex().w_full().relative().child(
|
||||
h_flex()
|
||||
@@ -5506,7 +5235,7 @@ fn quote_selection_fold_placeholder(title: String, editor: WeakView<Editor>) ->
ButtonLike::new(fold_id)
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ElevatedSurface)
.child(Icon::new(IconName::TextSnippet))
.child(Icon::new(IconName::CursorIBeam))
.child(Label::new(title.clone()).single_line())
.on_click(move |_, cx| {
editor
@@ -5632,21 +5361,24 @@ fn render_docs_slash_command_trailer(
fn make_lsp_adapter_delegate(
project: &Model<Project>,
cx: &mut AppContext,
) -> Result<Option<Arc<dyn LspAdapterDelegate>>> {
) -> Result<Arc<dyn LspAdapterDelegate>> {
project.update(cx, |project, cx| {
// TODO: Find the right worktree.
let Some(worktree) = project.worktrees(cx).next() else {
return Ok(None::<Arc<dyn LspAdapterDelegate>>);
let worktree = project
.worktrees(cx)
.next()
.ok_or_else(|| anyhow!("no worktrees when constructing ProjectLspAdapterDelegate"))?;
let fs = if project.is_local() {
Some(project.fs().clone())
} else {
None
};
let http_client = project.client().http_client().clone();
project.lsp_store().update(cx, |lsp_store, cx| {
Ok(Some(LocalLspAdapterDelegate::new(
lsp_store,
&worktree,
http_client,
project.fs().clone(),
cx,
) as Arc<dyn LspAdapterDelegate>))
Ok(
ProjectLspAdapterDelegate::new(lsp_store, &worktree, http_client, fs, None, cx)
as Arc<dyn LspAdapterDelegate>,
)
})
})
}
@@ -5750,85 +5482,3 @@ fn configuration_error(cx: &AppContext) -> Option<ConfigurationError> {

None
}

#[cfg(test)]
mod tests {
use super::*;
use gpui::{AppContext, Context};
use language::Buffer;
use unindent::Unindent;

#[gpui::test]
fn test_find_code_blocks(cx: &mut AppContext) {
let markdown = languages::language("markdown", tree_sitter_md::LANGUAGE.into());

let buffer = cx.new_model(|cx| {
let text = r#"
line 0
line 1
```rust
fn main() {}
```
line 5
line 6
line 7
```go
func main() {}
```
line 11
```
this is plain text code block
```

```go
func another() {}
```
line 19
"#
.unindent();
let mut buffer = Buffer::local(text, cx);
buffer.set_language(Some(markdown.clone()), cx);
buffer
});
let snapshot = buffer.read(cx).snapshot();

let code_blocks = vec![
Point::new(3, 0)..Point::new(4, 0),
Point::new(9, 0)..Point::new(10, 0),
Point::new(13, 0)..Point::new(14, 0),
Point::new(17, 0)..Point::new(18, 0),
]
.into_iter()
.map(|range| snapshot.point_to_offset(range.start)..snapshot.point_to_offset(range.end))
.collect::<Vec<_>>();

let expected_results = vec![
(0, None),
(1, None),
(2, Some(code_blocks[0].clone())),
(3, Some(code_blocks[0].clone())),
(4, Some(code_blocks[0].clone())),
(5, None),
(6, None),
(7, None),
(8, Some(code_blocks[1].clone())),
(9, Some(code_blocks[1].clone())),
(10, Some(code_blocks[1].clone())),
(11, None),
(12, Some(code_blocks[2].clone())),
(13, Some(code_blocks[2].clone())),
(14, Some(code_blocks[2].clone())),
(15, None),
(16, Some(code_blocks[3].clone())),
(17, Some(code_blocks[3].clone())),
(18, Some(code_blocks[3].clone())),
(19, None),
];

for (row, expected) in expected_results {
let offset = snapshot.point_to_offset(Point::new(row, 0));
let range = find_surrounding_code_block(&snapshot, offset);
assert_eq!(range, expected, "unexpected result on row {:?}", row);
}
}
}
|
||||
@@ -59,7 +59,6 @@ pub struct AssistantSettings {
|
||||
pub default_width: Pixels,
|
||||
pub default_height: Pixels,
|
||||
pub default_model: LanguageModelSelection,
|
||||
pub inline_alternatives: Vec<LanguageModelSelection>,
|
||||
pub using_outdated_settings_version: bool,
|
||||
}
|
||||
|
||||
@@ -237,7 +236,6 @@ impl AssistantSettingsContent {
|
||||
})
|
||||
}
|
||||
}),
|
||||
inline_alternatives: None,
|
||||
},
|
||||
VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
|
||||
},
|
||||
@@ -256,7 +254,6 @@ impl AssistantSettingsContent {
|
||||
.id()
|
||||
.to_string(),
|
||||
}),
|
||||
inline_alternatives: None,
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -372,7 +369,6 @@ impl Default for VersionedAssistantSettingsContent {
|
||||
default_width: None,
|
||||
default_height: None,
|
||||
default_model: None,
|
||||
inline_alternatives: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -401,8 +397,6 @@ pub struct AssistantSettingsContentV2 {
|
||||
default_height: Option<f32>,
|
||||
/// The default model to use when creating new contexts.
|
||||
default_model: Option<LanguageModelSelection>,
|
||||
/// Additional models with which to generate alternatives when performing inline assists.
|
||||
inline_alternatives: Option<Vec<LanguageModelSelection>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
|
||||
@@ -523,8 +517,10 @@ impl Settings for AssistantSettings {
|
||||
&mut settings.default_height,
|
||||
value.default_height.map(Into::into),
|
||||
);
|
||||
merge(&mut settings.default_model, value.default_model);
|
||||
merge(&mut settings.inline_alternatives, value.inline_alternatives);
|
||||
merge(
|
||||
&mut settings.default_model,
|
||||
value.default_model.map(Into::into),
|
||||
);
|
||||
// merge(&mut settings.infer_context, value.infer_context); TODO re-enable this once we ship context inference
|
||||
}
|
||||
|
||||
@@ -578,7 +574,6 @@ mod tests {
|
||||
provider: "test-provider".into(),
|
||||
model: "gpt-99".into(),
|
||||
}),
|
||||
inline_alternatives: None,
|
||||
enabled: None,
|
||||
button: None,
|
||||
dock: None,
|
||||
|
||||
@@ -26,7 +26,6 @@ use gpui::{
|
||||
|
||||
use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
|
||||
use language_model::{
|
||||
provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError},
|
||||
LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
|
||||
LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
|
||||
LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role,
|
||||
@@ -47,9 +46,9 @@ use std::{
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
|
||||
use telemetry_events::AssistantKind;
|
||||
use text::BufferSnapshot;
|
||||
use util::{post_inc, ResultExt, TryFutureExt};
|
||||
use util::{post_inc, TryFutureExt};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
|
||||
@@ -163,9 +162,6 @@ impl ContextOperation {
|
||||
)?,
|
||||
icon: section.icon_name.parse()?,
|
||||
label: section.label.into(),
|
||||
metadata: section
|
||||
.metadata
|
||||
.and_then(|metadata| serde_json::from_str(&metadata).log_err()),
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?,
|
||||
@@ -246,9 +242,6 @@ impl ContextOperation {
|
||||
)),
|
||||
icon_name: icon_name.to_string(),
|
||||
label: section.label.to_string(),
|
||||
metadata: section.metadata.as_ref().and_then(|metadata| {
|
||||
serde_json::to_string(metadata).log_err()
|
||||
}),
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
@@ -295,8 +288,6 @@ impl ContextOperation {
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ContextEvent {
|
||||
ShowAssistError(SharedString),
|
||||
ShowPaymentRequiredError,
|
||||
ShowMaxMonthlySpendReachedError,
|
||||
MessagesEdited,
|
||||
SummaryChanged,
|
||||
StreamedCompletion,
|
||||
@@ -552,7 +543,7 @@ impl Context {
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Self {
|
||||
let buffer = cx.new_model(|_cx| {
|
||||
let buffer = Buffer::remote(
|
||||
let mut buffer = Buffer::remote(
|
||||
language::BufferId::new(1).unwrap(),
|
||||
replica_id,
|
||||
capability,
|
||||
@@ -644,13 +635,12 @@ impl Context {
|
||||
.slash_command_output_sections
|
||||
.iter()
|
||||
.filter_map(|section| {
|
||||
if section.is_valid(buffer) {
|
||||
let range = section.range.to_offset(buffer);
|
||||
let range = section.range.to_offset(buffer);
|
||||
if section.range.start.is_valid(buffer) && !range.is_empty() {
|
||||
Some(assistant_slash_command::SlashCommandOutputSection {
|
||||
range,
|
||||
icon: section.icon,
|
||||
label: section.label.clone(),
|
||||
metadata: section.metadata.clone(),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
@@ -686,7 +676,7 @@ impl Context {
|
||||
buffer.set_text(saved_context.text.as_str(), cx)
|
||||
});
|
||||
let operations = saved_context.into_ops(&this.buffer, cx);
|
||||
this.apply_ops(operations, cx);
|
||||
this.apply_ops(operations, cx).unwrap();
|
||||
this
|
||||
}
|
||||
|
||||
@@ -759,7 +749,7 @@ impl Context {
|
||||
&mut self,
|
||||
ops: impl IntoIterator<Item = ContextOperation>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
) -> Result<()> {
|
||||
let mut buffer_ops = Vec::new();
|
||||
for op in ops {
|
||||
match op {
|
||||
@@ -768,8 +758,10 @@ impl Context {
|
||||
}
|
||||
}
|
||||
self.buffer
|
||||
.update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx));
|
||||
.update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
|
||||
self.flush_ops(cx);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
|
||||
@@ -1005,17 +997,14 @@ impl Context {
|
||||
fn handle_buffer_event(
|
||||
&mut self,
|
||||
_: Model<Buffer>,
|
||||
event: &language::BufferEvent,
|
||||
event: &language::Event,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
match event {
|
||||
language::BufferEvent::Operation {
|
||||
operation,
|
||||
is_local: true,
|
||||
} => cx.emit(ContextEvent::Operation(ContextOperation::BufferOperation(
|
||||
operation.clone(),
|
||||
))),
|
||||
language::BufferEvent::Edited => {
|
||||
language::Event::Operation(operation) => cx.emit(ContextEvent::Operation(
|
||||
ContextOperation::BufferOperation(operation.clone()),
|
||||
)),
|
||||
language::Event::Edited => {
|
||||
self.count_remaining_tokens(cx);
|
||||
self.reparse(cx);
|
||||
// Use `inclusive = true` to invalidate a step when an edit occurs
|
||||
@@ -1836,7 +1825,6 @@ impl Context {
|
||||
..buffer.anchor_before(start + section.range.end),
|
||||
icon: section.icon,
|
||||
label: section.label,
|
||||
metadata: section.metadata,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
|
||||
@@ -1973,9 +1961,8 @@ impl Context {
}

pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
let model_registry = LanguageModelRegistry::read_global(cx);
let provider = model_registry.active_provider()?;
let model = model_registry.active_model()?;
let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
let model = LanguageModelRegistry::read_global(cx).active_model()?;
let last_message_id = self.get_last_valid_message_id(cx)?;

if !provider.is_authenticated(cx) {
@@ -2115,53 +2102,34 @@ impl Context {
let result = stream_completion.await;

this.update(&mut cx, |this, cx| {
let error_message = if let Some(error) = result.as_ref().err() {
if error.is::<PaymentRequiredError>() {
cx.emit(ContextEvent::ShowPaymentRequiredError);
this.update_metadata(assistant_message_id, cx, |metadata| {
metadata.status = MessageStatus::Canceled;
});
Some(error.to_string())
} else if error.is::<MaxMonthlySpendReachedError>() {
cx.emit(ContextEvent::ShowMaxMonthlySpendReachedError);
this.update_metadata(assistant_message_id, cx, |metadata| {
metadata.status = MessageStatus::Canceled;
});
Some(error.to_string())
let error_message = result
.as_ref()
.err()
.map(|error| error.to_string().trim().to_string());

if let Some(error_message) = error_message.as_ref() {
cx.emit(ContextEvent::ShowAssistError(SharedString::from(
error_message.clone(),
)));
}

this.update_metadata(assistant_message_id, cx, |metadata| {
if let Some(error_message) = error_message.as_ref() {
metadata.status =
MessageStatus::Error(SharedString::from(error_message.clone()));
} else {
let error_message = error.to_string().trim().to_string();
cx.emit(ContextEvent::ShowAssistError(SharedString::from(
error_message.clone(),
)));
this.update_metadata(assistant_message_id, cx, |metadata| {
metadata.status =
MessageStatus::Error(SharedString::from(error_message.clone()));
});
Some(error_message)
}
} else {
this.update_metadata(assistant_message_id, cx, |metadata| {
metadata.status = MessageStatus::Done;
});
None
};
}
});

if let Some(telemetry) = this.telemetry.as_ref() {
let language_name = this
.buffer
.read(cx)
.language()
.map(|language| language.name());
telemetry.report_assistant_event(AssistantEvent {
conversation_id: Some(this.id.0.clone()),
kind: AssistantKind::Panel,
phase: AssistantPhase::Response,
model: model.telemetry_id(),
model_provider: model.provider_id().to_string(),
telemetry.report_assistant_event(
Some(this.id.0.clone()),
AssistantKind::Panel,
model.telemetry_id(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
});
);
}

if let Ok(stop_reason) = result {
@@ -2205,7 +2173,7 @@ impl Context {
|
||||
messages: Vec::new(),
|
||||
tools: Vec::new(),
|
||||
stop: Vec::new(),
|
||||
temperature: None,
|
||||
temperature: 1.0,
|
||||
};
|
||||
for message in self.messages(cx) {
|
||||
if message.status != MessageStatus::Done {
|
||||
@@ -3009,7 +2977,6 @@ impl SavedContext {
|
||||
..buffer.anchor_before(section.range.end),
|
||||
icon: section.icon,
|
||||
label: section.label,
|
||||
metadata: section.metadata,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
|
||||
@@ -12,7 +12,7 @@ use assistant_slash_command::{
|
||||
use collections::HashSet;
|
||||
use fs::FakeFs;
|
||||
use gpui::{AppContext, Model, SharedString, Task, TestAppContext, WeakView};
|
||||
use language::{Buffer, BufferSnapshot, LanguageRegistry, LspAdapterDelegate};
|
||||
use language::{Buffer, LanguageRegistry, LspAdapterDelegate};
|
||||
use language_model::{LanguageModelCacheConfiguration, LanguageModelRegistry, Role};
|
||||
use parking_lot::Mutex;
|
||||
use project::Project;
|
||||
@@ -1089,7 +1089,6 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
|
||||
range: section_start..section_end,
|
||||
icon: ui::IconName::Ai,
|
||||
label: "section".into(),
|
||||
metadata: None,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1166,7 +1165,9 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
|
||||
);
|
||||
|
||||
network.lock().broadcast(replica_id, ops_to_send);
|
||||
context.update(cx, |context, cx| context.apply_ops(ops_to_receive, cx));
|
||||
context
|
||||
.update(cx, |context, cx| context.apply_ops(ops_to_receive, cx))
|
||||
.unwrap();
|
||||
} else if rng.gen_bool(0.1) && replica_id != 0 {
|
||||
log::info!("Context {}: disconnecting", context_index);
|
||||
network.lock().disconnect_peer(replica_id);
|
||||
@@ -1178,7 +1179,9 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
|
||||
.map(ContextOperation::from_proto)
|
||||
.collect::<Result<Vec<_>>>()
|
||||
.unwrap();
|
||||
context.update(cx, |context, cx| context.apply_ops(ops, cx));
|
||||
context
|
||||
.update(cx, |context, cx| context.apply_ops(ops, cx))
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1422,8 +1425,6 @@ impl SlashCommand for FakeSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
_cx: &mut WindowContext,
|
||||
|
||||
@@ -2,6 +2,7 @@ use crate::{
|
||||
prompts::PromptBuilder, Context, ContextEvent, ContextId, ContextOperation, ContextVersion,
|
||||
SavedContext, SavedContextMetadata,
|
||||
};
|
||||
use ::proto::AnyProtoClient;
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use client::{proto, telemetry::Telemetry, Client, TypedEnvelope};
|
||||
use clock::ReplicaId;
|
||||
@@ -15,7 +16,6 @@ use language::LanguageRegistry;
|
||||
use paths::contexts_dir;
|
||||
use project::Project;
|
||||
use regex::Regex;
|
||||
use rpc::AnyProtoClient;
|
||||
use std::{
|
||||
cmp::Reverse,
|
||||
ffi::OsStr,
|
||||
@@ -223,7 +223,7 @@ impl ContextStore {
|
||||
if let Some(context) = this.loaded_context_for_id(&context_id, cx) {
|
||||
let operation_proto = envelope.payload.operation.context("invalid operation")?;
|
||||
let operation = ContextOperation::from_proto(operation_proto)?;
|
||||
context.update(cx, |context, cx| context.apply_ops([operation], cx));
|
||||
context.update(cx, |context, cx| context.apply_ops([operation], cx))?;
|
||||
}
|
||||
Ok(())
|
||||
})?
|
||||
@@ -357,6 +357,9 @@ impl ContextStore {
|
||||
let Some(project_id) = project.remote_id() else {
|
||||
return Task::ready(Err(anyhow!("project was not remote")));
|
||||
};
|
||||
if project.is_local_or_ssh() {
|
||||
return Task::ready(Err(anyhow!("cannot create remote contexts as the host")));
|
||||
}
|
||||
|
||||
let replica_id = project.replica_id();
|
||||
let capability = project.capability();
|
||||
@@ -391,7 +394,7 @@ impl ContextStore {
|
||||
.collect::<Result<Vec<_>>>()
|
||||
})
|
||||
.await?;
|
||||
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
|
||||
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
|
||||
existing_context
|
||||
@@ -485,6 +488,9 @@ impl ContextStore {
|
||||
let Some(project_id) = project.remote_id() else {
|
||||
return Task::ready(Err(anyhow!("project was not remote")));
|
||||
};
|
||||
if project.is_local_or_ssh() {
|
||||
return Task::ready(Err(anyhow!("cannot open remote contexts as the host")));
|
||||
}
|
||||
|
||||
if let Some(context) = self.loaded_context_for_id(&context_id, cx) {
|
||||
return Task::ready(Ok(context));
|
||||
@@ -525,7 +531,7 @@ impl ContextStore {
|
||||
.collect::<Result<Vec<_>>>()
|
||||
})
|
||||
.await?;
|
||||
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
|
||||
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
|
||||
existing_context
|
||||
|
||||
@@ -521,9 +521,9 @@ impl PromptLibrary {
|
||||
editor.set_show_indent_guides(false, cx);
|
||||
editor.set_use_modal_editing(false);
|
||||
editor.set_current_line_highlight(Some(CurrentLineHighlight::None));
|
||||
editor.set_completion_provider(Some(Box::new(
|
||||
editor.set_completion_provider(Box::new(
|
||||
SlashCommandCompletionProvider::new(None, None),
|
||||
)));
|
||||
));
|
||||
if focus {
|
||||
editor.focus(cx);
|
||||
}
|
||||
@@ -796,7 +796,7 @@ impl PromptLibrary {
|
||||
}],
|
||||
tools: Vec::new(),
|
||||
stop: Vec::new(),
|
||||
temperature: None,
|
||||
temperature: 1.,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
@@ -910,7 +910,7 @@ impl PromptLibrary {
|
||||
.features
|
||||
.clone(),
|
||||
font_size: HeadlineSize::Large
|
||||
.rems()
|
||||
.size()
|
||||
.into(),
|
||||
font_weight: settings.ui_font.weight,
|
||||
line_height: relative(
|
||||
@@ -921,8 +921,10 @@ impl PromptLibrary {
|
||||
scrollbar_width: Pixels::ZERO,
|
||||
syntax: cx.theme().syntax().clone(),
|
||||
status: cx.theme().status().clone(),
|
||||
inlay_hints_style:
|
||||
editor::make_inlay_hints_style(cx),
|
||||
inlay_hints_style: HighlightStyle {
|
||||
color: Some(cx.theme().status().hint),
|
||||
..HighlightStyle::default()
|
||||
},
|
||||
suggestions_style: HighlightStyle {
|
||||
color: Some(cx.theme().status().predictive),
|
||||
..HighlightStyle::default()
|
||||
|
||||
@@ -4,20 +4,13 @@ use fs::Fs;
|
||||
use futures::StreamExt;
|
||||
use gpui::AssetSource;
|
||||
use handlebars::{Handlebars, RenderError};
|
||||
use language::{BufferSnapshot, LanguageName, Point};
|
||||
use language::{BufferSnapshot, LanguageName};
|
||||
use parking_lot::Mutex;
|
||||
use serde::Serialize;
|
||||
use std::{ops::Range, path::PathBuf, sync::Arc, time::Duration};
|
||||
use text::LineEnding;
|
||||
use util::ResultExt;
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct ContentPromptDiagnosticContext {
|
||||
pub line_number: usize,
|
||||
pub error_message: String,
|
||||
pub code_content: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct ContentPromptContext {
|
||||
pub content_type: String,
|
||||
@@ -27,7 +20,6 @@ pub struct ContentPromptContext {
|
||||
pub document_content: String,
|
||||
pub user_prompt: String,
|
||||
pub rewrite_section: Option<String>,
|
||||
pub diagnostic_errors: Vec<ContentPromptDiagnosticContext>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
@@ -40,11 +32,6 @@ pub struct TerminalAssistantPromptContext {
|
||||
pub user_prompt: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct ProjectSlashCommandPromptContext {
|
||||
pub context_buffer: String,
|
||||
}
|
||||
|
||||
/// Context required to generate a workflow step resolution prompt.
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct StepResolutionContext {
|
||||
@@ -95,9 +82,10 @@ impl PromptBuilder {
|
||||
/// and application context.
|
||||
/// * `handlebars` - An `Arc<Mutex<Handlebars>>` for registering and updating templates.
|
||||
fn watch_fs_for_template_overrides(
|
||||
params: PromptLoadingParams,
|
||||
mut params: PromptLoadingParams,
|
||||
handlebars: Arc<Mutex<Handlebars<'static>>>,
|
||||
) {
|
||||
params.repo_path = None;
|
||||
let templates_dir = paths::prompt_overrides_dir(params.repo_path.as_deref());
|
||||
params.cx.background_executor()
|
||||
.spawn(async move {
|
||||
@@ -232,8 +220,7 @@ impl PromptBuilder {
|
||||
let before_range = 0..range.start;
|
||||
let truncated_before = if before_range.len() > MAX_CTX {
|
||||
is_truncated = true;
|
||||
let start = buffer.clip_offset(range.start - MAX_CTX, text::Bias::Right);
|
||||
start..range.start
|
||||
range.start - MAX_CTX..range.start
|
||||
} else {
|
||||
before_range
|
||||
};
|
||||
@@ -241,8 +228,7 @@ impl PromptBuilder {
|
||||
let after_range = range.end..buffer.len();
|
||||
let truncated_after = if after_range.len() > MAX_CTX {
|
||||
is_truncated = true;
|
||||
let end = buffer.clip_offset(range.end + MAX_CTX, text::Bias::Left);
|
||||
range.end..end
|
||||
range.end..range.end + MAX_CTX
|
||||
} else {
|
||||
after_range
|
||||
};
|
||||
@@ -273,17 +259,6 @@ impl PromptBuilder {
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let diagnostics = buffer.diagnostics_in_range::<_, Point>(range, false);
|
||||
let diagnostic_errors: Vec<ContentPromptDiagnosticContext> = diagnostics
|
||||
.map(|entry| {
|
||||
let start = entry.range.start;
|
||||
ContentPromptDiagnosticContext {
|
||||
line_number: (start.row + 1) as usize,
|
||||
error_message: entry.diagnostic.message.clone(),
|
||||
code_content: buffer.text_for_range(entry.range.clone()).collect(),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
let context = ContentPromptContext {
|
||||
content_type: content_type.to_string(),
|
||||
@@ -293,8 +268,8 @@ impl PromptBuilder {
|
||||
document_content,
|
||||
user_prompt,
|
||||
rewrite_section,
|
||||
diagnostic_errors,
|
||||
};
|
||||
|
||||
self.handlebars.lock().render("content_prompt", &context)
|
||||
}
|
||||
|
||||
@@ -322,14 +297,4 @@ impl PromptBuilder {
|
||||
pub fn generate_workflow_prompt(&self) -> Result<String, RenderError> {
|
||||
self.handlebars.lock().render("edit_workflow", &())
|
||||
}
|
||||
|
||||
pub fn generate_project_slash_command_prompt(
|
||||
&self,
|
||||
context_buffer: String,
|
||||
) -> Result<String, RenderError> {
|
||||
self.handlebars.lock().render(
|
||||
"project_slash_command",
|
||||
&ProjectSlashCommandPromptContext { context_buffer },
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,11 +18,10 @@ use std::{
|
||||
};
|
||||
use ui::ActiveTheme;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub mod auto_command;
|
||||
pub mod cargo_workspace_command;
|
||||
pub mod context_server_command;
|
||||
pub mod default_command;
|
||||
pub mod delta_command;
|
||||
pub mod diagnostics_command;
|
||||
pub mod docs_command;
|
||||
pub mod fetch_command;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use super::create_label_for_command;
|
||||
use super::{SlashCommand, SlashCommandOutput};
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use assistant_slash_command::ArgumentCompletion;
|
||||
use feature_flags::FeatureFlag;
|
||||
use futures::StreamExt;
|
||||
use gpui::{AppContext, AsyncAppContext, Task, WeakView};
|
||||
@@ -31,11 +31,11 @@ impl SlashCommand for AutoCommand {
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Automatically infer what context to add".into()
|
||||
"Automatically infer what context to add, based on your prompt".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
"Automatically Infer Context".into()
|
||||
}
|
||||
|
||||
fn label(&self, cx: &AppContext) -> CodeLabel {
|
||||
@@ -87,8 +87,6 @@ impl SlashCommand for AutoCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: language::BufferSnapshot,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -216,7 +214,7 @@ async fn commands_for_summaries(
|
||||
}],
|
||||
tools: Vec::new(),
|
||||
stop: Vec::new(),
|
||||
temperature: None,
|
||||
temperature: 1.0,
|
||||
};
|
||||
|
||||
while let Some(current_summaries) = stack.pop() {
|
||||
|
||||
@@ -1,153 +0,0 @@
|
||||
use super::{SlashCommand, SlashCommandOutput};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use fs::Fs;
|
||||
use gpui::{AppContext, Model, Task, WeakView};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use project::{Project, ProjectPath};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
path::Path,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct CargoWorkspaceSlashCommand;
|
||||
|
||||
impl CargoWorkspaceSlashCommand {
|
||||
async fn build_message(fs: Arc<dyn Fs>, path_to_cargo_toml: &Path) -> Result<String> {
|
||||
let buffer = fs.load(path_to_cargo_toml).await?;
|
||||
let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?;
|
||||
|
||||
let mut message = String::new();
|
||||
writeln!(message, "You are in a Rust project.")?;
|
||||
|
||||
if let Some(workspace) = cargo_toml.workspace {
|
||||
writeln!(
|
||||
message,
|
||||
"The project is a Cargo workspace with the following members:"
|
||||
)?;
|
||||
for member in workspace.members {
|
||||
writeln!(message, "- {member}")?;
|
||||
}
|
||||
|
||||
if !workspace.default_members.is_empty() {
|
||||
writeln!(message, "The default members are:")?;
|
||||
for member in workspace.default_members {
|
||||
writeln!(message, "- {member}")?;
|
||||
}
|
||||
}
|
||||
|
||||
if !workspace.dependencies.is_empty() {
|
||||
writeln!(
|
||||
message,
|
||||
"The following workspace dependencies are installed:"
|
||||
)?;
|
||||
for dependency in workspace.dependencies.keys() {
|
||||
writeln!(message, "- {dependency}")?;
|
||||
}
|
||||
}
|
||||
} else if let Some(package) = cargo_toml.package {
|
||||
writeln!(
|
||||
message,
|
||||
"The project name is \"{name}\".",
|
||||
name = package.name
|
||||
)?;
|
||||
|
||||
let description = package
|
||||
.description
|
||||
.as_ref()
|
||||
.and_then(|description| description.get().ok().cloned());
|
||||
if let Some(description) = description.as_ref() {
|
||||
writeln!(message, "It describes itself as \"{description}\".")?;
|
||||
}
|
||||
|
||||
if !cargo_toml.dependencies.is_empty() {
|
||||
writeln!(message, "The following dependencies are installed:")?;
|
||||
for dependency in cargo_toml.dependencies.keys() {
|
||||
writeln!(message, "- {dependency}")?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(message)
|
||||
}
|
||||
|
||||
fn path_to_cargo_toml(project: Model<Project>, cx: &mut AppContext) -> Option<Arc<Path>> {
|
||||
let worktree = project.read(cx).worktrees(cx).next()?;
|
||||
let worktree = worktree.read(cx);
|
||||
let entry = worktree.entry_for_path("Cargo.toml")?;
|
||||
let path = ProjectPath {
|
||||
worktree_id: worktree.id(),
|
||||
path: entry.path.clone(),
|
||||
};
|
||||
Some(Arc::from(
|
||||
project.read(cx).absolute_path(&path, cx)?.as_path(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl SlashCommand for CargoWorkspaceSlashCommand {
|
||||
fn name(&self) -> String {
|
||||
"cargo-workspace".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"insert project workspace metadata".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
"Insert Project Workspace Metadata".into()
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let output = workspace.update(cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
let fs = workspace.project().read(cx).fs().clone();
|
||||
let path = Self::path_to_cargo_toml(project, cx);
|
||||
let output = cx.background_executor().spawn(async move {
|
||||
let path = path.with_context(|| "Cargo.toml not found")?;
|
||||
Self::build_message(fs, &path).await
|
||||
});
|
||||
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let text = output.await?;
|
||||
let range = 0..text.len();
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections: vec![SlashCommandOutputSection {
|
||||
range,
|
||||
icon: IconName::FileTree,
|
||||
label: "Project".into(),
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
})
|
||||
});
|
||||
output.unwrap_or_else(|error| Task::ready(Err(error)))
|
||||
}
|
||||
}
|
||||
@@ -9,7 +9,7 @@ use context_servers::{
|
||||
protocol::PromptInfo,
|
||||
};
|
||||
use gpui::{Task, WeakView, WindowContext};
|
||||
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate};
|
||||
use language::{CodeLabel, LspAdapterDelegate};
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::Arc;
|
||||
use text::LineEnding;
|
||||
@@ -96,6 +96,7 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
replace_previous_arguments: false,
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(completions)
|
||||
})
|
||||
} else {
|
||||
@@ -106,8 +107,6 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -142,7 +141,6 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
.description
|
||||
.unwrap_or(format!("Result from {}", prompt_name)),
|
||||
),
|
||||
metadata: None,
|
||||
}],
|
||||
text: prompt,
|
||||
run_commands_in_text: false,
|
||||
|
||||
@@ -3,7 +3,7 @@ use crate::prompt_library::PromptStore;
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use language::LspAdapterDelegate;
|
||||
use std::{
|
||||
fmt::Write,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
@@ -43,8 +43,6 @@ impl SlashCommand for DefaultSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -72,7 +70,6 @@ impl SlashCommand for DefaultSlashCommand {
|
||||
range: 0..text.len(),
|
||||
icon: IconName::Library,
|
||||
label: "Default".into(),
|
||||
metadata: None,
|
||||
}],
|
||||
text,
|
||||
run_commands_in_text: true,
|
||||
|
||||
@@ -1,109 +0,0 @@
|
||||
use crate::slash_command::file_command::{FileCommandMetadata, FileSlashCommand};
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
};
|
||||
use collections::HashSet;
|
||||
use futures::future;
|
||||
use gpui::{Task, WeakView, WindowContext};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use std::sync::{atomic::AtomicBool, Arc};
|
||||
use text::OffsetRangeExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct DeltaSlashCommand;
|
||||
|
||||
impl SlashCommand for DeltaSlashCommand {
|
||||
fn name(&self) -> String {
|
||||
"delta".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Re-insert changed files".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_cancellation_flag: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
context_buffer: BufferSnapshot,
|
||||
workspace: WeakView<Workspace>,
|
||||
delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let mut paths = HashSet::default();
|
||||
let mut file_command_old_outputs = Vec::new();
|
||||
let mut file_command_new_outputs = Vec::new();
|
||||
for section in context_slash_command_output_sections.iter().rev() {
|
||||
if let Some(metadata) = section
|
||||
.metadata
|
||||
.as_ref()
|
||||
.and_then(|value| serde_json::from_value::<FileCommandMetadata>(value.clone()).ok())
|
||||
{
|
||||
if paths.insert(metadata.path.clone()) {
|
||||
file_command_old_outputs.push(
|
||||
context_buffer
|
||||
.as_rope()
|
||||
.slice(section.range.to_offset(&context_buffer)),
|
||||
);
|
||||
file_command_new_outputs.push(Arc::new(FileSlashCommand).run(
|
||||
&[metadata.path.clone()],
|
||||
context_slash_command_output_sections,
|
||||
context_buffer.clone(),
|
||||
workspace.clone(),
|
||||
delegate.clone(),
|
||||
cx,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cx.background_executor().spawn(async move {
|
||||
let mut output = SlashCommandOutput::default();
|
||||
|
||||
let file_command_new_outputs = future::join_all(file_command_new_outputs).await;
|
||||
for (old_text, new_output) in file_command_old_outputs
|
||||
.into_iter()
|
||||
.zip(file_command_new_outputs)
|
||||
{
|
||||
if let Ok(new_output) = new_output {
|
||||
if let Some(file_command_range) = new_output.sections.first() {
|
||||
let new_text = &new_output.text[file_command_range.range.clone()];
|
||||
if old_text.chars().ne(new_text.chars()) {
|
||||
output.sections.extend(new_output.sections.into_iter().map(
|
||||
|section| SlashCommandOutputSection {
|
||||
range: output.text.len() + section.range.start
|
||||
..output.text.len() + section.range.end,
|
||||
icon: section.icon,
|
||||
label: section.label,
|
||||
metadata: section.metadata,
|
||||
},
|
||||
));
|
||||
output.text.push_str(&new_output.text);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(output)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -9,9 +9,10 @@ use language::{
|
||||
};
|
||||
use project::{DiagnosticSummary, PathMatchCandidateSet, Project};
|
||||
use rope::Point;
|
||||
use std::fmt::Write;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
path::{Path, PathBuf},
|
||||
ops::Range,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
use ui::prelude::*;
|
||||
@@ -95,7 +96,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
"Insert Diagnostics".into()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
@@ -162,8 +163,6 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -176,7 +175,68 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
|
||||
let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);
|
||||
|
||||
cx.spawn(move |_| async move { task.await?.ok_or_else(|| anyhow!("No diagnostics found")) })
|
||||
cx.spawn(move |_| async move {
|
||||
let Some((text, sections)) = task.await? else {
|
||||
return Ok(SlashCommandOutput {
|
||||
sections: vec![SlashCommandOutputSection {
|
||||
range: 0..1,
|
||||
icon: IconName::Library,
|
||||
label: "No Diagnostics".into(),
|
||||
}],
|
||||
text: "\n".to_string(),
|
||||
run_commands_in_text: true,
|
||||
});
|
||||
};
|
||||
|
||||
let sections = sections
|
||||
.into_iter()
|
||||
.map(|(range, placeholder_type)| SlashCommandOutputSection {
|
||||
range,
|
||||
icon: match placeholder_type {
|
||||
PlaceholderType::Root(_, _) => IconName::Warning,
|
||||
PlaceholderType::File(_) => IconName::File,
|
||||
PlaceholderType::Diagnostic(DiagnosticType::Error, _) => IconName::XCircle,
|
||||
PlaceholderType::Diagnostic(DiagnosticType::Warning, _) => {
|
||||
IconName::Warning
|
||||
}
|
||||
},
|
||||
label: match placeholder_type {
|
||||
PlaceholderType::Root(summary, source) => {
|
||||
let mut label = String::new();
|
||||
label.push_str("Diagnostics");
|
||||
if let Some(source) = source {
|
||||
write!(label, " ({})", source).unwrap();
|
||||
}
|
||||
|
||||
if summary.error_count > 0 || summary.warning_count > 0 {
|
||||
label.push(':');
|
||||
|
||||
if summary.error_count > 0 {
|
||||
write!(label, " {} errors", summary.error_count).unwrap();
|
||||
if summary.warning_count > 0 {
|
||||
label.push_str(",");
|
||||
}
|
||||
}
|
||||
|
||||
if summary.warning_count > 0 {
|
||||
write!(label, " {} warnings", summary.warning_count).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
label.into()
|
||||
}
|
||||
PlaceholderType::File(file_path) => file_path.into(),
|
||||
PlaceholderType::Diagnostic(_, message) => message.into(),
|
||||
},
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections,
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -217,7 +277,7 @@ fn collect_diagnostics(
|
||||
project: Model<Project>,
|
||||
options: Options,
|
||||
cx: &mut AppContext,
|
||||
) -> Task<Result<Option<SlashCommandOutput>>> {
|
||||
) -> Task<Result<Option<(String, Vec<(Range<usize>, PlaceholderType)>)>>> {
|
||||
let error_source = if let Some(path_matcher) = &options.path_matcher {
|
||||
debug_assert_eq!(path_matcher.sources().len(), 1);
|
||||
Some(path_matcher.sources().first().cloned().unwrap_or_default())
|
||||
@@ -258,13 +318,13 @@ fn collect_diagnostics(
|
||||
.collect();
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let mut output = SlashCommandOutput::default();
|
||||
|
||||
let mut text = String::new();
|
||||
if let Some(error_source) = error_source.as_ref() {
|
||||
writeln!(output.text, "diagnostics: {}", error_source).unwrap();
|
||||
writeln!(text, "diagnostics: {}", error_source).unwrap();
|
||||
} else {
|
||||
writeln!(output.text, "diagnostics").unwrap();
|
||||
writeln!(text, "diagnostics").unwrap();
|
||||
}
|
||||
let mut sections: Vec<(Range<usize>, PlaceholderType)> = Vec::new();
|
||||
|
||||
let mut project_summary = DiagnosticSummary::default();
|
||||
for (project_path, path, summary) in diagnostic_summaries {
|
||||
@@ -281,10 +341,10 @@ fn collect_diagnostics(
|
||||
continue;
|
||||
}
|
||||
|
||||
let last_end = output.text.len();
|
||||
let last_end = text.len();
|
||||
let file_path = path.to_string_lossy().to_string();
|
||||
if !glob_is_exact_file_match {
|
||||
writeln!(&mut output.text, "{file_path}").unwrap();
|
||||
writeln!(&mut text, "{file_path}").unwrap();
|
||||
}
|
||||
|
||||
if let Some(buffer) = project_handle
|
||||
@@ -292,73 +352,75 @@ fn collect_diagnostics(
|
||||
.await
|
||||
.log_err()
|
||||
{
|
||||
let snapshot = cx.read_model(&buffer, |buffer, _| buffer.snapshot())?;
|
||||
collect_buffer_diagnostics(&mut output, &snapshot, options.include_warnings);
|
||||
collect_buffer_diagnostics(
|
||||
&mut text,
|
||||
&mut sections,
|
||||
cx.read_model(&buffer, |buffer, _| buffer.snapshot())?,
|
||||
options.include_warnings,
|
||||
);
|
||||
}
|
||||
|
||||
if !glob_is_exact_file_match {
|
||||
output.sections.push(SlashCommandOutputSection {
|
||||
range: last_end..output.text.len().saturating_sub(1),
|
||||
icon: IconName::File,
|
||||
label: file_path.into(),
|
||||
metadata: None,
|
||||
});
|
||||
sections.push((
|
||||
last_end..text.len().saturating_sub(1),
|
||||
PlaceholderType::File(file_path),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
// No diagnostics found
|
||||
if output.sections.is_empty() {
|
||||
if sections.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let mut label = String::new();
|
||||
label.push_str("Diagnostics");
|
||||
if let Some(source) = error_source {
|
||||
write!(label, " ({})", source).unwrap();
|
||||
}
|
||||
|
||||
if project_summary.error_count > 0 || project_summary.warning_count > 0 {
|
||||
label.push(':');
|
||||
|
||||
if project_summary.error_count > 0 {
|
||||
write!(label, " {} errors", project_summary.error_count).unwrap();
|
||||
if project_summary.warning_count > 0 {
|
||||
label.push_str(",");
|
||||
}
|
||||
}
|
||||
|
||||
if project_summary.warning_count > 0 {
|
||||
write!(label, " {} warnings", project_summary.warning_count).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
output.sections.insert(
|
||||
0,
|
||||
SlashCommandOutputSection {
|
||||
range: 0..output.text.len(),
|
||||
icon: IconName::Warning,
|
||||
label: label.into(),
|
||||
metadata: None,
|
||||
},
|
||||
);
|
||||
|
||||
Ok(Some(output))
|
||||
sections.push((
|
||||
0..text.len(),
|
||||
PlaceholderType::Root(project_summary, error_source),
|
||||
));
|
||||
Ok(Some((text, sections)))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn collect_buffer_diagnostics(
|
||||
output: &mut SlashCommandOutput,
|
||||
pub fn buffer_has_error_diagnostics(snapshot: &BufferSnapshot) -> bool {
|
||||
for (_, group) in snapshot.diagnostic_groups(None) {
|
||||
let entry = &group.entries[group.primary_ix];
|
||||
if entry.diagnostic.severity == DiagnosticSeverity::ERROR {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
pub fn write_single_file_diagnostics(
|
||||
output: &mut String,
|
||||
path: Option<&Path>,
|
||||
snapshot: &BufferSnapshot,
|
||||
) -> bool {
|
||||
if let Some(path) = path {
|
||||
if buffer_has_error_diagnostics(&snapshot) {
|
||||
output.push_str("/diagnostics ");
|
||||
output.push_str(&path.to_string_lossy());
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn collect_buffer_diagnostics(
|
||||
text: &mut String,
|
||||
sections: &mut Vec<(Range<usize>, PlaceholderType)>,
|
||||
snapshot: BufferSnapshot,
|
||||
include_warnings: bool,
|
||||
) {
|
||||
for (_, group) in snapshot.diagnostic_groups(None) {
|
||||
let entry = &group.entries[group.primary_ix];
|
||||
collect_diagnostic(output, entry, &snapshot, include_warnings)
|
||||
collect_diagnostic(text, sections, entry, &snapshot, include_warnings)
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_diagnostic(
|
||||
output: &mut SlashCommandOutput,
|
||||
text: &mut String,
|
||||
sections: &mut Vec<(Range<usize>, PlaceholderType)>,
|
||||
entry: &DiagnosticEntry<Anchor>,
|
||||
snapshot: &BufferSnapshot,
|
||||
include_warnings: bool,
|
||||
@@ -366,17 +428,17 @@ fn collect_diagnostic(
|
||||
const EXCERPT_EXPANSION_SIZE: u32 = 2;
|
||||
const MAX_MESSAGE_LENGTH: usize = 2000;
|
||||
|
||||
let (ty, icon) = match entry.diagnostic.severity {
|
||||
let ty = match entry.diagnostic.severity {
|
||||
DiagnosticSeverity::WARNING => {
|
||||
if !include_warnings {
|
||||
return;
|
||||
}
|
||||
("warning", IconName::Warning)
|
||||
DiagnosticType::Warning
|
||||
}
|
||||
DiagnosticSeverity::ERROR => ("error", IconName::XCircle),
|
||||
DiagnosticSeverity::ERROR => DiagnosticType::Error,
|
||||
_ => return,
|
||||
};
|
||||
let prev_len = output.text.len();
|
||||
let prev_len = text.len();
|
||||
|
||||
let range = entry.range.to_point(snapshot);
|
||||
let diagnostic_row_number = range.start.row + 1;
|
||||
@@ -386,11 +448,11 @@ fn collect_diagnostic(
|
||||
let excerpt_range =
|
||||
Point::new(start_row, 0).to_offset(&snapshot)..Point::new(end_row, 0).to_offset(&snapshot);
|
||||
|
||||
output.text.push_str("```");
|
||||
text.push_str("```");
|
||||
if let Some(language_name) = snapshot.language().map(|l| l.code_fence_block_name()) {
|
||||
output.text.push_str(&language_name);
|
||||
text.push_str(&language_name);
|
||||
}
|
||||
output.text.push('\n');
|
||||
text.push('\n');
|
||||
|
||||
let mut buffer_text = String::new();
|
||||
for chunk in snapshot.text_for_range(excerpt_range) {
|
||||
@@ -399,26 +461,46 @@ fn collect_diagnostic(
|
||||
|
||||
for (i, line) in buffer_text.lines().enumerate() {
|
||||
let line_number = start_row + i as u32 + 1;
|
||||
writeln!(output.text, "{}", line).unwrap();
|
||||
writeln!(text, "{}", line).unwrap();
|
||||
|
||||
if line_number == diagnostic_row_number {
|
||||
output.text.push_str("//");
|
||||
let prev_len = output.text.len();
|
||||
write!(output.text, " {}: ", ty).unwrap();
|
||||
let padding = output.text.len() - prev_len;
|
||||
text.push_str("//");
|
||||
let prev_len = text.len();
|
||||
write!(text, " {}: ", ty.as_str()).unwrap();
|
||||
let padding = text.len() - prev_len;
|
||||
|
||||
let message = util::truncate(&entry.diagnostic.message, MAX_MESSAGE_LENGTH)
|
||||
.replace('\n', format!("\n//{:padding$}", "").as_str());
|
||||
|
||||
writeln!(output.text, "{message}").unwrap();
|
||||
writeln!(text, "{message}").unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
writeln!(output.text, "```").unwrap();
|
||||
output.sections.push(SlashCommandOutputSection {
|
||||
range: prev_len..output.text.len().saturating_sub(1),
|
||||
icon,
|
||||
label: entry.diagnostic.message.clone().into(),
|
||||
metadata: None,
|
||||
});
|
||||
writeln!(text, "```").unwrap();
|
||||
sections.push((
|
||||
prev_len..text.len().saturating_sub(1),
|
||||
PlaceholderType::Diagnostic(ty, entry.diagnostic.message.clone()),
|
||||
))
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum PlaceholderType {
|
||||
Root(DiagnosticSummary, Option<String>),
|
||||
File(String),
|
||||
Diagnostic(DiagnosticType, String),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum DiagnosticType {
|
||||
Warning,
|
||||
Error,
|
||||
}
|
||||
|
||||
impl DiagnosticType {
|
||||
pub fn as_str(&self) -> &'static str {
|
||||
match self {
|
||||
DiagnosticType::Warning => "warning",
|
||||
DiagnosticType::Error => "error",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ use indexed_docs::{
|
||||
DocsDotRsProvider, IndexedDocsRegistry, IndexedDocsStore, LocalRustdocProvider, PackageName,
|
||||
ProviderId,
|
||||
};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use language::LspAdapterDelegate;
|
||||
use project::{Project, ProjectPath};
|
||||
use ui::prelude::*;
|
||||
use util::{maybe, ResultExt};
|
||||
@@ -269,8 +269,6 @@ impl SlashCommand for DocsSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -351,7 +349,6 @@ impl SlashCommand for DocsSlashCommand {
|
||||
range,
|
||||
icon: IconName::FileDoc,
|
||||
label: format!("docs ({provider}): {key}",).into(),
|
||||
metadata: None,
|
||||
})
|
||||
.collect(),
|
||||
run_commands_in_text: false,
|
||||
|
||||
@@ -11,7 +11,7 @@ use futures::AsyncReadExt;
|
||||
use gpui::{Task, WeakView};
|
||||
use html_to_markdown::{convert_html_to_markdown, markdown, TagHandler};
|
||||
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use language::LspAdapterDelegate;
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -104,11 +104,11 @@ impl SlashCommand for FetchSlashCommand {
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Insert fetched URL contents".into()
|
||||
"insert URL contents".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
"Insert fetched URL contents".into()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
@@ -128,8 +128,6 @@ impl SlashCommand for FetchSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -163,7 +161,6 @@ impl SlashCommand for FetchSlashCommand {
|
||||
range,
|
||||
icon: IconName::AtSign,
|
||||
label: format!("fetch {}", url).into(),
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
use super::{diagnostics_command::collect_buffer_diagnostics, SlashCommand, SlashCommandOutput};
|
||||
use super::{diagnostics_command::write_single_file_diagnostics, SlashCommand, SlashCommandOutput};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use assistant_slash_command::{AfterCompletion, ArgumentCompletion, SlashCommandOutputSection};
|
||||
use fuzzy::PathMatch;
|
||||
use gpui::{AppContext, Model, Task, View, WeakView};
|
||||
use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate};
|
||||
use project::{PathMatchCandidateSet, Project};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
ops::{Range, RangeInclusive},
|
||||
ops::Range,
|
||||
path::{Path, PathBuf},
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
@@ -110,11 +109,11 @@ impl SlashCommand for FileSlashCommand {
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Insert file".into()
|
||||
"insert file".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
"Insert File".into()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
@@ -176,8 +175,6 @@ impl SlashCommand for FileSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -190,15 +187,54 @@ impl SlashCommand for FileSlashCommand {
|
||||
return Task::ready(Err(anyhow!("missing path")));
|
||||
};
|
||||
|
||||
collect_files(workspace.read(cx).project().clone(), arguments, cx)
|
||||
let task = collect_files(workspace.read(cx).project().clone(), arguments, cx);
|
||||
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let output = task.await?;
|
||||
Ok(SlashCommandOutput {
|
||||
text: output.completion_text,
|
||||
sections: output
|
||||
.files
|
||||
.into_iter()
|
||||
.map(|file| {
|
||||
build_entry_output_section(
|
||||
file.range_in_text,
|
||||
Some(&file.path),
|
||||
file.entry_type == EntryType::Directory,
|
||||
None,
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
run_commands_in_text: true,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Debug)]
|
||||
enum EntryType {
|
||||
File,
|
||||
Directory,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
struct FileCommandOutput {
|
||||
completion_text: String,
|
||||
files: Vec<OutputFile>,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
struct OutputFile {
|
||||
range_in_text: Range<usize>,
|
||||
path: PathBuf,
|
||||
entry_type: EntryType,
|
||||
}
|
||||
|
||||
fn collect_files(
|
||||
project: Model<Project>,
|
||||
glob_inputs: &[String],
|
||||
cx: &mut AppContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
) -> Task<Result<FileCommandOutput>> {
|
||||
let Ok(matchers) = glob_inputs
|
||||
.into_iter()
|
||||
.map(|glob_input| {
|
||||
@@ -218,7 +254,8 @@ fn collect_files(
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let mut output = SlashCommandOutput::default();
|
||||
let mut text = String::new();
|
||||
let mut ranges = Vec::new();
|
||||
for snapshot in snapshots {
|
||||
let worktree_id = snapshot.id();
|
||||
let mut directory_stack: Vec<(Arc<Path>, String, usize)> = Vec::new();
|
||||
@@ -242,12 +279,11 @@ fn collect_files(
|
||||
break;
|
||||
}
|
||||
let (_, entry_name, start) = directory_stack.pop().unwrap();
|
||||
output.sections.push(build_entry_output_section(
|
||||
start..output.text.len().saturating_sub(1),
|
||||
Some(&PathBuf::from(entry_name)),
|
||||
true,
|
||||
None,
|
||||
));
|
||||
ranges.push(OutputFile {
|
||||
range_in_text: start..text.len().saturating_sub(1),
|
||||
path: PathBuf::from(entry_name),
|
||||
entry_type: EntryType::Directory,
|
||||
});
|
||||
}
|
||||
|
||||
let filename = entry
|
||||
@@ -279,23 +315,21 @@ fn collect_files(
|
||||
continue;
|
||||
}
|
||||
let prefix_paths = folded_directory_names_stack.drain(..).as_slice().join("/");
|
||||
let entry_start = output.text.len();
|
||||
let entry_start = text.len();
|
||||
if prefix_paths.is_empty() {
|
||||
if is_top_level_directory {
|
||||
output
|
||||
.text
|
||||
.push_str(&path_including_worktree_name.to_string_lossy());
|
||||
text.push_str(&path_including_worktree_name.to_string_lossy());
|
||||
is_top_level_directory = false;
|
||||
} else {
|
||||
output.text.push_str(&filename);
|
||||
text.push_str(&filename);
|
||||
}
|
||||
directory_stack.push((entry.path.clone(), filename, entry_start));
|
||||
} else {
|
||||
let entry_name = format!("{}/{}", prefix_paths, &filename);
|
||||
output.text.push_str(&entry_name);
|
||||
text.push_str(&entry_name);
|
||||
directory_stack.push((entry.path.clone(), entry_name, entry_start));
|
||||
}
|
||||
output.text.push('\n');
|
||||
text.push('\n');
|
||||
} else if entry.is_file() {
|
||||
let Some(open_buffer_task) = project_handle
|
||||
.update(&mut cx, |project, cx| {
|
||||
@@ -306,13 +340,28 @@ fn collect_files(
|
||||
continue;
|
||||
};
|
||||
if let Some(buffer) = open_buffer_task.await.log_err() {
|
||||
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
|
||||
append_buffer_to_output(
|
||||
&snapshot,
|
||||
let buffer_snapshot =
|
||||
cx.read_model(&buffer, |buffer, _| buffer.snapshot())?;
|
||||
let prev_len = text.len();
|
||||
collect_file_content(
|
||||
&mut text,
|
||||
&buffer_snapshot,
|
||||
path_including_worktree_name.to_string_lossy().to_string(),
|
||||
);
|
||||
text.push('\n');
|
||||
if !write_single_file_diagnostics(
|
||||
&mut text,
|
||||
Some(&path_including_worktree_name),
|
||||
&mut output,
|
||||
)
|
||||
.log_err();
|
||||
&buffer_snapshot,
|
||||
) {
|
||||
text.pop();
|
||||
}
|
||||
ranges.push(OutputFile {
|
||||
range_in_text: prev_len..text.len(),
|
||||
path: path_including_worktree_name,
|
||||
entry_type: EntryType::File,
|
||||
});
|
||||
text.push('\n');
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -322,30 +371,43 @@ fn collect_files(
|
||||
let mut root_path = PathBuf::new();
|
||||
root_path.push(snapshot.root_name());
|
||||
root_path.push(&dir);
|
||||
output.sections.push(build_entry_output_section(
|
||||
start..output.text.len(),
|
||||
Some(&root_path),
|
||||
true,
|
||||
None,
|
||||
));
|
||||
ranges.push(OutputFile {
|
||||
range_in_text: start..text.len(),
|
||||
path: root_path,
|
||||
entry_type: EntryType::Directory,
|
||||
});
|
||||
} else {
|
||||
output.sections.push(build_entry_output_section(
|
||||
start..output.text.len(),
|
||||
Some(&PathBuf::from(entry.as_str())),
|
||||
true,
|
||||
None,
|
||||
));
|
||||
ranges.push(OutputFile {
|
||||
range_in_text: start..text.len(),
|
||||
path: PathBuf::from(entry.as_str()),
|
||||
entry_type: EntryType::Directory,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(output)
|
||||
Ok(FileCommandOutput {
|
||||
completion_text: text,
|
||||
files: ranges,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn codeblock_fence_for_path(
path: Option<&Path>,
row_range: Option<RangeInclusive<u32>>,
) -> String {
fn collect_file_content(buffer: &mut String, snapshot: &BufferSnapshot, filename: String) {
let mut content = snapshot.text();
LineEnding::normalize(&mut content);
buffer.reserve(filename.len() + content.len() + 9);
buffer.push_str(&codeblock_fence_for_path(
Some(&PathBuf::from(filename)),
None,
));
buffer.push_str(&content);
if !buffer.ends_with('\n') {
buffer.push('\n');
}
buffer.push_str("```");
}

pub fn codeblock_fence_for_path(path: Option<&Path>, row_range: Option<Range<u32>>) -> String {
let mut text = String::new();
write!(text, "```").unwrap();

@@ -360,18 +422,13 @@ pub fn codeblock_fence_for_path(
}

if let Some(row_range) = row_range {
write!(text, ":{}-{}", row_range.start() + 1, row_range.end() + 1).unwrap();
write!(text, ":{}-{}", row_range.start + 1, row_range.end + 1).unwrap();
}

text.push('\n');
text
}
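
The replacement `codeblock_fence_for_path` takes a half-open `Range<u32>` of rows instead of a `RangeInclusive` and prints a 1-based `:start-end` suffix. A standalone sketch of the fence header it produces; the hunk elides the lines that emit the language tag, so the bare `path.display()` below is an approximation of the path handling.

```rust
use std::fmt::Write as _;
use std::ops::Range;
use std::path::Path;

// Sketch of the fence header built above: "```", an optional path, and a
// 1-based ":start-end" row suffix.
fn codeblock_fence(path: Option<&Path>, row_range: Option<Range<u32>>) -> String {
    let mut text = String::new();
    write!(text, "```").unwrap();

    if let Some(path) = path {
        write!(text, "{}", path.display()).unwrap();
    }

    if let Some(row_range) = row_range {
        // The diff switches from `RangeInclusive` to a half-open `Range`, but
        // the printed row numbers stay 1-based.
        write!(text, ":{}-{}", row_range.start + 1, row_range.end + 1).unwrap();
    }

    text.push('\n');
    text
}

fn main() {
    let fence = codeblock_fence(Some(Path::new("src/main.rs")), Some(9..41));
    assert_eq!(fence, "```src/main.rs:10-42\n");
}
```
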
#[derive(Serialize, Deserialize)]
|
||||
pub struct FileCommandMetadata {
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
pub fn build_entry_output_section(
|
||||
range: Range<usize>,
|
||||
path: Option<&Path>,
|
||||
@@ -397,16 +454,6 @@ pub fn build_entry_output_section(
|
||||
range,
|
||||
icon,
|
||||
label: label.into(),
|
||||
metadata: if is_directory {
|
||||
None
|
||||
} else {
|
||||
path.and_then(|path| {
|
||||
serde_json::to_value(FileCommandMetadata {
|
||||
path: path.to_string_lossy().to_string(),
|
||||
})
|
||||
.ok()
|
||||
})
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -492,36 +539,6 @@ mod custom_path_matcher {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn append_buffer_to_output(
|
||||
buffer: &BufferSnapshot,
|
||||
path: Option<&Path>,
|
||||
output: &mut SlashCommandOutput,
|
||||
) -> Result<()> {
|
||||
let prev_len = output.text.len();
|
||||
|
||||
let mut content = buffer.text();
|
||||
LineEnding::normalize(&mut content);
|
||||
output.text.push_str(&codeblock_fence_for_path(path, None));
|
||||
output.text.push_str(&content);
|
||||
if !output.text.ends_with('\n') {
|
||||
output.text.push('\n');
|
||||
}
|
||||
output.text.push_str("```");
|
||||
output.text.push('\n');
|
||||
|
||||
let section_ix = output.sections.len();
|
||||
collect_buffer_diagnostics(output, buffer, false);
|
||||
|
||||
output.sections.insert(
|
||||
section_ix,
|
||||
build_entry_output_section(prev_len..output.text.len(), path, false, None),
|
||||
);
|
||||
|
||||
output.text.push('\n');
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use fs::FakeFs;
|
||||
@@ -574,9 +591,9 @@ mod test {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result_1.text.starts_with("root/dir"));
|
||||
assert!(result_1.completion_text.starts_with("root/dir"));
|
||||
// 4 files + 2 directories
|
||||
assert_eq!(result_1.sections.len(), 6);
|
||||
assert_eq!(6, result_1.files.len());
|
||||
|
||||
let result_2 = cx
|
||||
.update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx))
|
||||
@@ -590,9 +607,9 @@ mod test {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result.text.starts_with("root/dir"));
|
||||
assert!(result.completion_text.starts_with("root/dir"));
|
||||
// 5 files + 2 directories
|
||||
assert_eq!(result.sections.len(), 7);
|
||||
assert_eq!(7, result.files.len());
|
||||
|
||||
// Ensure that the project lasts until after the last await
|
||||
drop(project);
|
||||
@@ -637,27 +654,36 @@ mod test {
|
||||
.unwrap();
|
||||
|
||||
// Sanity check
|
||||
assert!(result.text.starts_with("zed/assets/themes\n"));
|
||||
assert_eq!(result.sections.len(), 7);
|
||||
assert!(result.completion_text.starts_with("zed/assets/themes\n"));
|
||||
assert_eq!(7, result.files.len());
|
||||
|
||||
// Ensure that full file paths are included in the real output
|
||||
assert!(result.text.contains("zed/assets/themes/andromeda/LICENSE"));
|
||||
assert!(result.text.contains("zed/assets/themes/ayu/LICENSE"));
|
||||
assert!(result.text.contains("zed/assets/themes/summercamp/LICENSE"));
|
||||
assert!(result
|
||||
.completion_text
|
||||
.contains("zed/assets/themes/andromeda/LICENSE"));
|
||||
assert!(result
|
||||
.completion_text
|
||||
.contains("zed/assets/themes/ayu/LICENSE"));
|
||||
assert!(result
|
||||
.completion_text
|
||||
.contains("zed/assets/themes/summercamp/LICENSE"));
|
||||
|
||||
assert_eq!(result.sections[5].label, "summercamp");
|
||||
assert_eq!("summercamp", result.files[5].path.to_string_lossy());
|
||||
|
||||
// Ensure that things are in descending order, with properly relativized paths
|
||||
assert_eq!(
|
||||
result.sections[0].label,
|
||||
"zed/assets/themes/andromeda/LICENSE"
|
||||
"zed/assets/themes/andromeda/LICENSE",
|
||||
result.files[0].path.to_string_lossy()
|
||||
);
|
||||
assert_eq!(result.sections[1].label, "andromeda");
|
||||
assert_eq!(result.sections[2].label, "zed/assets/themes/ayu/LICENSE");
|
||||
assert_eq!(result.sections[3].label, "ayu");
|
||||
assert_eq!("andromeda", result.files[1].path.to_string_lossy());
|
||||
assert_eq!(
|
||||
result.sections[4].label,
|
||||
"zed/assets/themes/summercamp/LICENSE"
|
||||
"zed/assets/themes/ayu/LICENSE",
|
||||
result.files[2].path.to_string_lossy()
|
||||
);
|
||||
assert_eq!("ayu", result.files[3].path.to_string_lossy());
|
||||
assert_eq!(
|
||||
"zed/assets/themes/summercamp/LICENSE",
|
||||
result.files[4].path.to_string_lossy()
|
||||
);
|
||||
|
||||
// Ensure that the project lasts until after the last await
|
||||
@@ -697,24 +723,27 @@ mod test {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result.text.starts_with("zed/assets/themes\n"));
|
||||
assert_eq!(result.sections[0].label, "zed/assets/themes/LICENSE");
|
||||
assert!(result.completion_text.starts_with("zed/assets/themes\n"));
|
||||
assert_eq!(
|
||||
result.sections[1].label,
|
||||
"zed/assets/themes/summercamp/LICENSE"
|
||||
"zed/assets/themes/LICENSE",
|
||||
result.files[0].path.to_string_lossy()
|
||||
);
|
||||
assert_eq!(
|
||||
result.sections[2].label,
|
||||
"zed/assets/themes/summercamp/subdir/LICENSE"
|
||||
"zed/assets/themes/summercamp/LICENSE",
|
||||
result.files[1].path.to_string_lossy()
|
||||
);
|
||||
assert_eq!(
|
||||
result.sections[3].label,
|
||||
"zed/assets/themes/summercamp/subdir/subsubdir/LICENSE"
|
||||
"zed/assets/themes/summercamp/subdir/LICENSE",
|
||||
result.files[2].path.to_string_lossy()
|
||||
);
|
||||
assert_eq!(result.sections[4].label, "subsubdir");
|
||||
assert_eq!(result.sections[5].label, "subdir");
|
||||
assert_eq!(result.sections[6].label, "summercamp");
|
||||
assert_eq!(result.sections[7].label, "zed/assets/themes");
|
||||
assert_eq!(
|
||||
"zed/assets/themes/summercamp/subdir/subsubdir/LICENSE",
|
||||
result.files[3].path.to_string_lossy()
|
||||
);
|
||||
assert_eq!("subsubdir", result.files[4].path.to_string_lossy());
|
||||
assert_eq!("subdir", result.files[5].path.to_string_lossy());
|
||||
assert_eq!("summercamp", result.files[6].path.to_string_lossy());
|
||||
assert_eq!("zed/assets/themes", result.files[7].path.to_string_lossy());
|
||||
|
||||
// Ensure that the project lasts until after the last await
|
||||
drop(project);
|
||||
|
||||
@@ -7,7 +7,7 @@ use assistant_slash_command::{
|
||||
};
|
||||
use chrono::Local;
|
||||
use gpui::{Task, WeakView};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use language::LspAdapterDelegate;
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -19,11 +19,11 @@ impl SlashCommand for NowSlashCommand {
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Insert current date and time".into()
|
||||
"insert the current date and time".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
"Insert Current Date and Time".into()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
@@ -43,8 +43,6 @@ impl SlashCommand for NowSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
_cx: &mut WindowContext,
|
||||
@@ -59,7 +57,6 @@ impl SlashCommand for NowSlashCommand {
|
||||
range,
|
||||
icon: IconName::CountdownTimer,
|
||||
label: now.to_rfc2822().into(),
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
}))
|
||||
|
||||
@@ -1,39 +1,90 @@
|
||||
use super::{
|
||||
create_label_for_command, search_command::add_search_result_section, SlashCommand,
|
||||
SlashCommandOutput,
|
||||
};
|
||||
use crate::PromptBuilder;
|
||||
use anyhow::{anyhow, Result};
|
||||
use super::{SlashCommand, SlashCommandOutput};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use feature_flags::FeatureFlag;
|
||||
use gpui::{AppContext, Task, WeakView, WindowContext};
|
||||
use language::{Anchor, CodeLabel, LspAdapterDelegate};
|
||||
use language_model::{LanguageModelRegistry, LanguageModelTool};
|
||||
use schemars::JsonSchema;
|
||||
use semantic_index::SemanticDb;
|
||||
use serde::Deserialize;
|
||||
|
||||
pub struct ProjectSlashCommandFeatureFlag;
|
||||
|
||||
impl FeatureFlag for ProjectSlashCommandFeatureFlag {
|
||||
const NAME: &'static str = "project-slash-command";
|
||||
}
|
||||
|
||||
use fs::Fs;
|
||||
use gpui::{AppContext, Model, Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use project::{Project, ProjectPath};
|
||||
use std::{
|
||||
fmt::Write as _,
|
||||
ops::DerefMut,
|
||||
fmt::Write,
|
||||
path::Path,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
use ui::{BorrowAppContext as _, IconName};
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub struct ProjectSlashCommand {
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
}
|
||||
pub(crate) struct ProjectSlashCommand;
|
||||
|
||||
impl ProjectSlashCommand {
|
||||
pub fn new(prompt_builder: Arc<PromptBuilder>) -> Self {
|
||||
Self { prompt_builder }
|
||||
async fn build_message(fs: Arc<dyn Fs>, path_to_cargo_toml: &Path) -> Result<String> {
|
||||
let buffer = fs.load(path_to_cargo_toml).await?;
|
||||
let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?;
|
||||
|
||||
let mut message = String::new();
|
||||
writeln!(message, "You are in a Rust project.")?;
|
||||
|
||||
if let Some(workspace) = cargo_toml.workspace {
|
||||
writeln!(
|
||||
message,
|
||||
"The project is a Cargo workspace with the following members:"
|
||||
)?;
|
||||
for member in workspace.members {
|
||||
writeln!(message, "- {member}")?;
|
||||
}
|
||||
|
||||
if !workspace.default_members.is_empty() {
|
||||
writeln!(message, "The default members are:")?;
|
||||
for member in workspace.default_members {
|
||||
writeln!(message, "- {member}")?;
|
||||
}
|
||||
}
|
||||
|
||||
if !workspace.dependencies.is_empty() {
|
||||
writeln!(
|
||||
message,
|
||||
"The following workspace dependencies are installed:"
|
||||
)?;
|
||||
for dependency in workspace.dependencies.keys() {
|
||||
writeln!(message, "- {dependency}")?;
|
||||
}
|
||||
}
|
||||
} else if let Some(package) = cargo_toml.package {
|
||||
writeln!(
|
||||
message,
|
||||
"The project name is \"{name}\".",
|
||||
name = package.name
|
||||
)?;
|
||||
|
||||
let description = package
|
||||
.description
|
||||
.as_ref()
|
||||
.and_then(|description| description.get().ok().cloned());
|
||||
if let Some(description) = description.as_ref() {
|
||||
writeln!(message, "It describes itself as \"{description}\".")?;
|
||||
}
|
||||
|
||||
if !cargo_toml.dependencies.is_empty() {
|
||||
writeln!(message, "The following dependencies are installed:")?;
|
||||
for dependency in cargo_toml.dependencies.keys() {
|
||||
writeln!(message, "- {dependency}")?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(message)
|
||||
}
|
||||
|
||||
fn path_to_cargo_toml(project: Model<Project>, cx: &mut AppContext) -> Option<Arc<Path>> {
let worktree = project.read(cx).worktrees(cx).next()?;
let worktree = worktree.read(cx);
let entry = worktree.entry_for_path("Cargo.toml")?;
let path = ProjectPath {
worktree_id: worktree.id(),
path: entry.path.clone(),
};
Some(Arc::from(
project.read(cx).absolute_path(&path, cx)?.as_path(),
))
}
}
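
`build_message` above reads `Cargo.toml`, parses it with the `cargo_toml` crate, and turns workspace members and dependencies into a plain-text preamble. A trimmed sketch of that parsing step, assuming the `anyhow`, `toml`, and `cargo_toml` crates used in the hunk; the default-members and package-description handling from the hunk is omitted.

```rust
use std::fmt::Write as _;

// Trimmed sketch of the manifest summary built above.
fn summarize_manifest(manifest_text: &str) -> anyhow::Result<String> {
    let manifest: cargo_toml::Manifest = toml::from_str(manifest_text)?;

    let mut message = String::new();
    writeln!(message, "You are in a Rust project.")?;

    if let Some(workspace) = manifest.workspace {
        writeln!(message, "The project is a Cargo workspace with the following members:")?;
        for member in workspace.members {
            writeln!(message, "- {member}")?;
        }
    } else if let Some(package) = manifest.package {
        writeln!(message, "The project name is \"{}\".", package.name)?;
        writeln!(message, "The following dependencies are installed:")?;
        for dependency in manifest.dependencies.keys() {
            writeln!(message, "- {dependency}")?;
        }
    }

    Ok(message)
}

fn main() -> anyhow::Result<()> {
    let toml_text = r#"
        [package]
        name = "example"
        version = "0.1.0"

        [dependencies]
        anyhow = "1"
    "#;
    println!("{}", summarize_manifest(toml_text)?);
    Ok(())
}
```
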
@@ -42,20 +93,12 @@ impl SlashCommand for ProjectSlashCommand {
|
||||
"project".into()
|
||||
}
|
||||
|
||||
fn label(&self, cx: &AppContext) -> CodeLabel {
|
||||
create_label_for_command("project", &[], cx)
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Generate a semantic search based on context".into()
|
||||
"insert project metadata".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
false
|
||||
"Insert Project Metadata".into()
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
@@ -65,126 +108,43 @@ impl SlashCommand for ProjectSlashCommand {
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<Anchor>],
|
||||
context_buffer: language::BufferSnapshot,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let current_model = model_registry.active_model();
|
||||
let prompt_builder = self.prompt_builder.clone();
|
||||
let output = workspace.update(cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
let fs = workspace.project().read(cx).fs().clone();
|
||||
let path = Self::path_to_cargo_toml(project, cx);
|
||||
let output = cx.background_executor().spawn(async move {
|
||||
let path = path.with_context(|| "Cargo.toml not found")?;
|
||||
Self::build_message(fs, &path).await
|
||||
});
|
||||
|
||||
let Some(workspace) = workspace.upgrade() else {
|
||||
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
|
||||
};
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let Some(project_index) =
|
||||
cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
|
||||
else {
|
||||
return Task::ready(Err(anyhow::anyhow!("no project indexer")));
|
||||
};
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?;
|
||||
|
||||
let prompt =
|
||||
prompt_builder.generate_project_slash_command_prompt(context_buffer.text())?;
|
||||
|
||||
let search_queries = current_model
|
||||
.use_tool::<SearchQueries>(
|
||||
language_model::LanguageModelRequest {
|
||||
messages: vec![language_model::LanguageModelRequestMessage {
|
||||
role: language_model::Role::User,
|
||||
content: vec![language_model::MessageContent::Text(prompt)],
|
||||
cache: false,
|
||||
}],
|
||||
tools: vec![],
|
||||
stop: vec![],
|
||||
temperature: None,
|
||||
},
|
||||
cx.deref_mut(),
|
||||
)
|
||||
.await?
|
||||
.search_queries;
|
||||
|
||||
let results = project_index
|
||||
.read_with(&cx, |project_index, cx| {
|
||||
project_index.search(search_queries.clone(), 25, cx)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let results = SemanticDb::load_results(results, &fs, &cx).await?;
|
||||
|
||||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
let mut output = "Project context:\n".to_string();
|
||||
let mut sections = Vec::new();
|
||||
|
||||
for (ix, query) in search_queries.into_iter().enumerate() {
|
||||
let start_ix = output.len();
|
||||
writeln!(&mut output, "Results for {query}:").unwrap();
|
||||
let mut has_results = false;
|
||||
for result in &results {
|
||||
if result.query_index == ix {
|
||||
add_search_result_section(result, &mut output, &mut sections);
|
||||
has_results = true;
|
||||
}
|
||||
}
|
||||
if has_results {
|
||||
sections.push(SlashCommandOutputSection {
|
||||
range: start_ix..output.len(),
|
||||
icon: IconName::MagnifyingGlass,
|
||||
label: query.into(),
|
||||
metadata: None,
|
||||
});
|
||||
output.push('\n');
|
||||
} else {
|
||||
output.truncate(start_ix);
|
||||
}
|
||||
}
|
||||
|
||||
sections.push(SlashCommandOutputSection {
|
||||
range: 0..output.len(),
|
||||
icon: IconName::Book,
|
||||
label: "Project context".into(),
|
||||
metadata: None,
|
||||
});
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
text: output,
|
||||
sections,
|
||||
run_commands_in_text: true,
|
||||
})
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let text = output.await?;
|
||||
let range = 0..text.len();
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections: vec![SlashCommandOutputSection {
|
||||
range,
|
||||
icon: IconName::FileTree,
|
||||
label: "Project".into(),
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
.await
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(JsonSchema, Deserialize)]
struct SearchQueries {
/// An array of semantic search queries.
///
/// These queries will be used to search the user's codebase.
/// The function can only accept 4 queries, otherwise it will error.
/// As such, it's important that you limit the length of the search_queries array to 4 queries or fewer.
search_queries: Vec<String>,
}
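
The `JsonSchema` and `Deserialize` derives on this struct are what allow the model's structured reply to be described and then parsed back into a typed value. A minimal sketch of inspecting that schema with the `schemars` crate; the struct below is a local stand-in for the one above, and `serde_json` is assumed alongside the crates already used here.

```rust
use schemars::{schema_for, JsonSchema};
use serde::Deserialize;

// Stand-in for the SearchQueries tool input above.
#[derive(JsonSchema, Deserialize)]
struct SearchQueries {
    /// An array of semantic search queries.
    search_queries: Vec<String>,
}

fn main() {
    // The derived schema is the kind of description a tool type like this can
    // expose so the model produces a matching JSON argument object.
    let schema = schema_for!(SearchQueries);
    println!("{}", serde_json::to_string_pretty(&schema).unwrap());

    // Parsing a structured reply back into the tool input type.
    let parsed: SearchQueries =
        serde_json::from_str(r#"{ "search_queries": ["tab handling", "buffer diagnostics"] }"#)
            .unwrap();
    assert_eq!(parsed.search_queries.len(), 2);
}
```
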
|
||||
impl LanguageModelTool for SearchQueries {
|
||||
fn name() -> String {
|
||||
"search_queries".to_string()
|
||||
}
|
||||
|
||||
fn description() -> String {
|
||||
"Generate semantic search queries based on context".to_string()
|
||||
})
|
||||
});
|
||||
output.unwrap_or_else(|error| Task::ready(Err(error)))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ use crate::prompt_library::PromptStore;
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use language::LspAdapterDelegate;
|
||||
use std::sync::{atomic::AtomicBool, Arc};
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
@@ -16,11 +16,11 @@ impl SlashCommand for PromptSlashCommand {
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Insert prompt from library".into()
|
||||
"insert prompt from library".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
"Insert Prompt from Library".into()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
@@ -56,8 +56,6 @@ impl SlashCommand for PromptSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -97,7 +95,6 @@ impl SlashCommand for PromptSlashCommand {
|
||||
range,
|
||||
icon: IconName::Library,
|
||||
label: title,
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: true,
|
||||
})
|
||||
|
||||
@@ -7,13 +7,15 @@ use anyhow::Result;
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use feature_flags::FeatureFlag;
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use language::{CodeLabel, LspAdapterDelegate};
|
||||
use semantic_index::{LoadedSearchResult, SemanticDb};
|
||||
use language::{CodeLabel, LineEnding, LspAdapterDelegate};
|
||||
use semantic_index::SemanticDb;
|
||||
use std::{
|
||||
fmt::Write,
|
||||
path::PathBuf,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
use ui::{prelude::*, IconName};
|
||||
use util::ResultExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct SearchSlashCommandFeatureFlag;
|
||||
@@ -34,11 +36,11 @@ impl SlashCommand for SearchSlashCommand {
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Search your project semantically".into()
|
||||
"semantic search".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
"Semantic Search".into()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
@@ -58,8 +60,6 @@ impl SlashCommand for SearchSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: language::BufferSnapshot,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -101,19 +101,66 @@ impl SlashCommand for SearchSlashCommand {
|
||||
cx.spawn(|cx| async move {
|
||||
let results = project_index
|
||||
.read_with(&cx, |project_index, cx| {
|
||||
project_index.search(vec![query.clone()], limit.unwrap_or(5), cx)
|
||||
project_index.search(query.clone(), limit.unwrap_or(5), cx)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let loaded_results = SemanticDb::load_results(results, &fs, &cx).await?;
|
||||
let mut loaded_results = Vec::new();
|
||||
for result in results {
|
||||
let (full_path, file_content) =
|
||||
result.worktree.read_with(&cx, |worktree, _cx| {
|
||||
let entry_abs_path = worktree.abs_path().join(&result.path);
|
||||
let mut entry_full_path = PathBuf::from(worktree.root_name());
|
||||
entry_full_path.push(&result.path);
|
||||
let file_content = async {
|
||||
let entry_abs_path = entry_abs_path;
|
||||
fs.load(&entry_abs_path).await
|
||||
};
|
||||
(entry_full_path, file_content)
|
||||
})?;
|
||||
if let Some(file_content) = file_content.await.log_err() {
|
||||
loaded_results.push((result, full_path, file_content));
|
||||
}
|
||||
}
|
||||
|
||||
let output = cx
|
||||
.background_executor()
|
||||
.spawn(async move {
|
||||
let mut text = format!("Search results for {query}:\n");
|
||||
let mut sections = Vec::new();
|
||||
for loaded_result in &loaded_results {
|
||||
add_search_result_section(loaded_result, &mut text, &mut sections);
|
||||
for (result, full_path, file_content) in loaded_results {
let range_start = result.range.start.min(file_content.len());
let range_end = result.range.end.min(file_content.len());

let start_row = file_content[0..range_start].matches('\n').count() as u32;
let end_row = file_content[0..range_end].matches('\n').count() as u32;
let start_line_byte_offset = file_content[0..range_start]
.rfind('\n')
.map(|pos| pos + 1)
.unwrap_or_default();
let end_line_byte_offset = file_content[range_end..]
.find('\n')
.map(|pos| range_end + pos)
.unwrap_or_else(|| file_content.len());

let section_start_ix = text.len();
text.push_str(&codeblock_fence_for_path(
Some(&result.path),
Some(start_row..end_row),
));

let mut excerpt =
file_content[start_line_byte_offset..end_line_byte_offset].to_string();
LineEnding::normalize(&mut excerpt);
text.push_str(&excerpt);
writeln!(text, "\n```\n").unwrap();
let section_end_ix = text.len() - 1;
sections.push(build_entry_output_section(
section_start_ix..section_end_ix,
Some(&full_path),
false,
Some(start_row + 1..end_row + 1),
));
}
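
The loop above turns a search result's byte range into 1-based row numbers and widens the excerpt to whole lines before fencing it. The same arithmetic as a standalone function over a plain `&str`, with no Zed types involved.

```rust
// Clamp a byte range to the file, count newlines to get 0-based rows, and snap
// the excerpt to line boundaries on both sides, as in the loop above.
fn excerpt_for_range(file_content: &str, range: std::ops::Range<usize>) -> (u32, u32, &str) {
    let range_start = range.start.min(file_content.len());
    let range_end = range.end.min(file_content.len());

    // Rows are newline counts before the range endpoints.
    let start_row = file_content[..range_start].matches('\n').count() as u32;
    let end_row = file_content[..range_end].matches('\n').count() as u32;

    // Widen to whole lines for the excerpt.
    let start_line_byte_offset = file_content[..range_start]
        .rfind('\n')
        .map(|pos| pos + 1)
        .unwrap_or_default();
    let end_line_byte_offset = file_content[range_end..]
        .find('\n')
        .map(|pos| range_end + pos)
        .unwrap_or(file_content.len());

    (start_row, end_row, &file_content[start_line_byte_offset..end_line_byte_offset])
}

fn main() {
    let content = "fn main() {\n    println!(\"hi\");\n}\n";
    let (start, end, excerpt) = excerpt_for_range(content, 16..24);
    assert_eq!((start, end), (1, 1));
    assert_eq!(excerpt, "    println!(\"hi\");");
}
```
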
|
||||
let query = SharedString::from(query);
|
||||
@@ -121,7 +168,6 @@ impl SlashCommand for SearchSlashCommand {
|
||||
range: 0..text.len(),
|
||||
icon: IconName::MagnifyingGlass,
|
||||
label: query,
|
||||
metadata: None,
|
||||
});
|
||||
|
||||
SlashCommandOutput {
|
||||
@@ -136,35 +182,3 @@ impl SlashCommand for SearchSlashCommand {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_search_result_section(
|
||||
loaded_result: &LoadedSearchResult,
|
||||
text: &mut String,
|
||||
sections: &mut Vec<SlashCommandOutputSection<usize>>,
|
||||
) {
|
||||
let LoadedSearchResult {
|
||||
path,
|
||||
full_path,
|
||||
excerpt_content,
|
||||
row_range,
|
||||
..
|
||||
} = loaded_result;
|
||||
let section_start_ix = text.len();
|
||||
text.push_str(&codeblock_fence_for_path(
|
||||
Some(&path),
|
||||
Some(row_range.clone()),
|
||||
));
|
||||
|
||||
text.push_str(&excerpt_content);
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
writeln!(text, "```\n").unwrap();
|
||||
let section_end_ix = text.len() - 1;
|
||||
sections.push(build_entry_output_section(
|
||||
section_start_ix..section_end_ix,
|
||||
Some(&full_path),
|
||||
false,
|
||||
Some(row_range.start() + 1..row_range.end() + 1),
|
||||
));
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ use anyhow::{anyhow, Context as _, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use editor::Editor;
|
||||
use gpui::{Task, WeakView};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use language::LspAdapterDelegate;
|
||||
use std::sync::Arc;
|
||||
use std::{path::Path, sync::atomic::AtomicBool};
|
||||
use ui::{IconName, WindowContext};
|
||||
@@ -17,11 +17,11 @@ impl SlashCommand for OutlineSlashCommand {
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Insert symbols for active tab".into()
|
||||
"insert symbols for active tab".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
"Insert Symbols for Active Tab".into()
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
@@ -41,8 +41,6 @@ impl SlashCommand for OutlineSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -79,7 +77,6 @@ impl SlashCommand for OutlineSlashCommand {
|
||||
range: 0..outline_text.len(),
|
||||
icon: IconName::ListTree,
|
||||
label: path.to_string_lossy().to_string().into(),
|
||||
metadata: None,
|
||||
}],
|
||||
text: outline_text,
|
||||
run_commands_in_text: false,
|
||||
|
||||
@@ -1,17 +1,21 @@
|
||||
use super::{file_command::append_buffer_to_output, SlashCommand, SlashCommandOutput};
|
||||
use super::{
|
||||
diagnostics_command::write_single_file_diagnostics,
|
||||
file_command::{build_entry_output_section, codeblock_fence_for_path},
|
||||
SlashCommand, SlashCommandOutput,
|
||||
};
|
||||
use anyhow::{Context, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use assistant_slash_command::ArgumentCompletion;
|
||||
use collections::{HashMap, HashSet};
|
||||
use editor::Editor;
|
||||
use futures::future::join_all;
|
||||
use gpui::{Entity, Task, WeakView};
|
||||
use language::{BufferSnapshot, CodeLabel, HighlightId, LspAdapterDelegate};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
path::PathBuf,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
use ui::{ActiveTheme, WindowContext};
|
||||
use util::ResultExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct TabSlashCommand;
|
||||
@@ -24,11 +28,11 @@ impl SlashCommand for TabSlashCommand {
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Insert open tabs (active tab by default)".to_owned()
|
||||
"insert open tabs (active tab by default)".to_owned()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
"Insert Open Tabs".to_owned()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
@@ -127,8 +131,6 @@ impl SlashCommand for TabSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -142,11 +144,40 @@ impl SlashCommand for TabSlashCommand {
|
||||
);
|
||||
|
||||
cx.background_executor().spawn(async move {
|
||||
let mut output = SlashCommandOutput::default();
|
||||
let mut sections = Vec::new();
|
||||
let mut text = String::new();
|
||||
let mut has_diagnostics = false;
|
||||
for (full_path, buffer, _) in tab_items_search.await? {
|
||||
append_buffer_to_output(&buffer, full_path.as_deref(), &mut output).log_err();
|
||||
let section_start_ix = text.len();
|
||||
text.push_str(&codeblock_fence_for_path(full_path.as_deref(), None));
|
||||
for chunk in buffer.as_rope().chunks() {
|
||||
text.push_str(chunk);
|
||||
}
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
writeln!(text, "```").unwrap();
|
||||
if write_single_file_diagnostics(&mut text, full_path.as_deref(), &buffer) {
|
||||
has_diagnostics = true;
|
||||
}
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
|
||||
let section_end_ix = text.len() - 1;
|
||||
sections.push(build_entry_output_section(
|
||||
section_start_ix..section_end_ix,
|
||||
full_path.as_deref(),
|
||||
false,
|
||||
None,
|
||||
));
|
||||
}
|
||||
Ok(output)
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections,
|
||||
run_commands_in_text: has_diagnostics,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
};
|
||||
use gpui::{AppContext, Task, View, WeakView};
|
||||
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate};
|
||||
use language::{CodeLabel, LspAdapterDelegate};
|
||||
use terminal_view::{terminal_panel::TerminalPanel, TerminalView};
|
||||
use ui::prelude::*;
|
||||
use workspace::{dock::Panel, Workspace};
|
||||
@@ -29,11 +29,11 @@ impl SlashCommand for TerminalSlashCommand {
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Insert terminal output".into()
|
||||
"insert terminal output".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
"Insert Terminal Output".into()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
@@ -57,8 +57,6 @@ impl SlashCommand for TerminalSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -93,7 +91,6 @@ impl SlashCommand for TerminalSlashCommand {
|
||||
range,
|
||||
icon: IconName::Terminal,
|
||||
label: "Terminal".into(),
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
}))
|
||||
|
||||
@@ -8,7 +8,7 @@ use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use language::LspAdapterDelegate;
|
||||
use ui::prelude::*;
|
||||
|
||||
use workspace::Workspace;
|
||||
@@ -29,11 +29,11 @@ impl SlashCommand for WorkflowSlashCommand {
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Insert prompt to opt into the edit workflow".into()
|
||||
"insert a prompt that opts into the edit workflow".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
"Insert Workflow Prompt".into()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
@@ -53,8 +53,6 @@ impl SlashCommand for WorkflowSlashCommand {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -70,7 +68,6 @@ impl SlashCommand for WorkflowSlashCommand {
|
||||
range,
|
||||
icon: IconName::Route,
|
||||
label: "Workflow".into(),
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
|
||||
@@ -184,7 +184,7 @@ impl PickerDelegate for SlashCommandDelegate {
|
||||
h_flex()
|
||||
.group(format!("command-entry-label-{ix}"))
|
||||
.w_full()
|
||||
.min_w(px(250.))
|
||||
.min_w(px(220.))
|
||||
.child(
|
||||
v_flex()
|
||||
.child(
|
||||
@@ -203,9 +203,7 @@ impl PickerDelegate for SlashCommandDelegate {
|
||||
div()
|
||||
.font_buffer(cx)
|
||||
.child(
|
||||
Label::new(args)
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
Label::new(args).size(LabelSize::Small),
|
||||
)
|
||||
.visible_on_hover(format!(
|
||||
"command-entry-label-{ix}"
|
||||
|
||||
@@ -10,9 +10,9 @@ pub struct SlashCommandSettings {
|
||||
/// Settings for the `/docs` slash command.
|
||||
#[serde(default)]
|
||||
pub docs: DocsCommandSettings,
|
||||
/// Settings for the `/cargo-workspace` slash command.
|
||||
/// Settings for the `/project` slash command.
|
||||
#[serde(default)]
|
||||
pub cargo_workspace: CargoWorkspaceCommandSettings,
|
||||
pub project: ProjectCommandSettings,
|
||||
}
|
||||
|
||||
/// Settings for the `/docs` slash command.
|
||||
@@ -23,10 +23,10 @@ pub struct DocsCommandSettings {
|
||||
pub enabled: bool,
|
||||
}
|
||||
|
||||
/// Settings for the `/cargo-workspace` slash command.
|
||||
/// Settings for the `/project` slash command.
|
||||
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
|
||||
pub struct CargoWorkspaceCommandSettings {
|
||||
/// Whether `/cargo-workspace` is enabled.
|
||||
pub struct ProjectCommandSettings {
|
||||
/// Whether `/project` is enabled.
|
||||
#[serde(default)]
|
||||
pub enabled: bool,
|
||||
}
|
||||
@@ -38,10 +38,7 @@ impl Settings for SlashCommandSettings {
|
||||
|
||||
fn load(sources: SettingsSources<Self::FileContent>, _cx: &mut AppContext) -> Result<Self> {
|
||||
SettingsSources::<Self::FileContent>::json_merge_with(
|
||||
[sources.default]
|
||||
.into_iter()
|
||||
.chain(sources.user)
|
||||
.chain(sources.server),
|
||||
[sources.default].into_iter().chain(sources.user),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,7 +25,6 @@ use std::{
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
|
||||
use terminal::Terminal;
|
||||
use terminal_view::TerminalView;
|
||||
use theme::ThemeSettings;
|
||||
@@ -285,7 +284,7 @@ impl TerminalInlineAssistant {
|
||||
messages,
|
||||
tools: Vec::new(),
|
||||
stop: Vec::new(),
|
||||
temperature: None,
|
||||
temperature: 1.0,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -414,7 +413,7 @@ impl TerminalInlineAssist {
|
||||
struct InlineAssistantError;
|
||||
|
||||
let id =
|
||||
NotificationId::composite::<InlineAssistantError>(
|
||||
NotificationId::identified::<InlineAssistantError>(
|
||||
assist_id.0,
|
||||
);
|
||||
|
||||
@@ -571,7 +570,7 @@ impl Render for PromptEditor {
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.border_y_1()
|
||||
.border_color(cx.theme().status().info_border)
|
||||
.py_2()
|
||||
.py_1p5()
|
||||
.h_full()
|
||||
.w_full()
|
||||
.on_action(cx.listener(Self::confirm))
|
||||
@@ -950,11 +949,12 @@ impl PromptEditor {
|
||||
} else {
|
||||
cx.theme().colors().text
|
||||
},
|
||||
font_family: settings.buffer_font.family.clone(),
|
||||
font_fallbacks: settings.buffer_font.fallbacks.clone(),
|
||||
font_size: settings.buffer_font_size.into(),
|
||||
font_weight: settings.buffer_font.weight,
|
||||
line_height: relative(settings.buffer_line_height.value()),
|
||||
font_family: settings.ui_font.family.clone(),
|
||||
font_features: settings.ui_font.features.clone(),
|
||||
font_fallbacks: settings.ui_font.fallbacks.clone(),
|
||||
font_size: rems(0.875).into(),
|
||||
font_weight: settings.ui_font.weight,
|
||||
line_height: relative(1.3),
|
||||
..Default::default()
|
||||
};
|
||||
EditorElement::new(
|
||||
@@ -1040,7 +1040,6 @@ impl Codegen {
|
||||
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
|
||||
self.generation = cx.spawn(|this, mut cx| async move {
|
||||
let model_telemetry_id = model.telemetry_id();
|
||||
let model_provider_id = model.provider_id();
|
||||
let response = model.stream_completion_text(prompt, &cx).await;
|
||||
let generate = async {
|
||||
let (mut hunks_tx, mut hunks_rx) = mpsc::channel(1);
|
||||
@@ -1065,16 +1064,13 @@ impl Codegen {
|
||||
|
||||
let error_message = result.as_ref().err().map(|error| error.to_string());
|
||||
if let Some(telemetry) = telemetry {
|
||||
telemetry.report_assistant_event(AssistantEvent {
|
||||
conversation_id: None,
|
||||
kind: AssistantKind::Inline,
|
||||
phase: AssistantPhase::Response,
|
||||
model: model_telemetry_id,
|
||||
model_provider: model_provider_id.to_string(),
|
||||
telemetry.report_assistant_event(
|
||||
None,
|
||||
telemetry_events::AssistantKind::Inline,
|
||||
model_telemetry_id,
|
||||
response_latency,
|
||||
error_message,
|
||||
language_name: None,
|
||||
});
|
||||
);
|
||||
}
|
||||
|
||||
result?;
|
||||
|
||||
@@ -187,7 +187,6 @@ impl WorkflowSuggestion {
|
||||
suggestion_range,
|
||||
initial_prompt,
|
||||
initial_transaction_id,
|
||||
false,
|
||||
Some(workspace.clone()),
|
||||
Some(assistant_panel),
|
||||
cx,
|
||||
|
||||
@@ -19,5 +19,4 @@ gpui.workspace = true
|
||||
language.workspace = true
|
||||
parking_lot.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
workspace.workspace = true
|
||||
|
||||
@@ -2,7 +2,7 @@ mod slash_command_registry;
|
||||
|
||||
use anyhow::Result;
|
||||
use gpui::{AnyElement, AppContext, ElementId, SharedString, Task, WeakView, WindowContext};
|
||||
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt};
|
||||
use language::{CodeLabel, LspAdapterDelegate};
|
||||
use serde::{Deserialize, Serialize};
|
||||
pub use slash_command_registry::*;
|
||||
use std::{
|
||||
@@ -77,8 +77,6 @@ pub trait SlashCommand: 'static + Send + Sync {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
context_buffer: BufferSnapshot,
|
||||
workspace: WeakView<Workspace>,
|
||||
// TODO: We're just using the `LspAdapterDelegate` here because that is
|
||||
// what the extension API is already expecting.
|
||||
@@ -96,7 +94,7 @@ pub type RenderFoldPlaceholder = Arc<
|
||||
+ Fn(ElementId, Arc<dyn Fn(&mut WindowContext)>, &mut WindowContext) -> AnyElement,
|
||||
>;
|
||||
|
||||
#[derive(Debug, Default, PartialEq)]
#[derive(Debug, Default)]
pub struct SlashCommandOutput {
pub text: String,
pub sections: Vec<SlashCommandOutputSection<usize>>,
@@ -108,11 +106,4 @@ pub struct SlashCommandOutputSection<T> {
pub range: Range<T>,
pub icon: IconName,
pub label: SharedString,
pub metadata: Option<serde_json::Value>,
}

impl SlashCommandOutputSection<language::Anchor> {
pub fn is_valid(&self, buffer: &language::TextBuffer) -> bool {
self.range.start.is_valid(buffer) && !self.range.to_offset(buffer).is_empty()
}
}
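
Nearly every command in this diff builds its result the same way: write the body into `text`, then push one section whose byte range spans what was just written. A minimal sketch of that pattern using local stand-ins for the types above; `Icon` here is a placeholder for Zed's `IconName`, and the real types live in the `assistant_slash_command` crate.

```rust
use std::ops::Range;

// Local stand-ins for the output types shown above.
#[derive(Debug)]
enum Icon {
    File,
}

#[derive(Debug)]
struct Section {
    range: Range<usize>,
    icon: Icon,
    label: String,
}

#[derive(Debug, Default)]
struct Output {
    text: String,
    sections: Vec<Section>,
    run_commands_in_text: bool,
}

// Append the body, then record a section whose byte range covers exactly what
// was written, mirroring the commands in this diff.
fn output_with_single_section(body: &str, label: &str) -> Output {
    let mut output = Output::default();
    output.text.push_str(body);
    output.sections.push(Section {
        range: 0..output.text.len(),
        icon: Icon::File,
        label: label.to_string(),
    });
    output
}

fn main() {
    let output = output_with_single_section("```rs\nfn main() {}\n```\n", "main.rs");
    println!("{output:?}");
    assert_eq!(output.sections[0].range, 0..output.text.len());
}
```
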
|
||||
@@ -18,5 +18,5 @@ collections.workspace = true
|
||||
derive_more.workspace = true
|
||||
gpui.workspace = true
|
||||
parking_lot.workspace = true
|
||||
rodio = { version = "0.19.0", default-features = false, features = ["wav"] }
|
||||
rodio = { version = "0.17.1", default-features = false, features = ["wav"] }
|
||||
util.workspace = true
|
||||
|
||||
@@ -19,6 +19,7 @@ db.workspace = true
|
||||
editor.workspace = true
|
||||
gpui.workspace = true
|
||||
http_client.workspace = true
|
||||
isahc.workspace = true
|
||||
log.workspace = true
|
||||
markdown_preview.workspace = true
|
||||
menu.workspace = true
|
||||
|
||||
@@ -9,6 +9,7 @@ use gpui::{
|
||||
actions, AppContext, AsyncAppContext, Context as _, Global, Model, ModelContext,
|
||||
SemanticVersion, SharedString, Task, View, ViewContext, VisualContext, WindowContext,
|
||||
};
|
||||
use isahc::AsyncBody;
|
||||
|
||||
use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView};
|
||||
use schemars::JsonSchema;
|
||||
@@ -19,7 +20,7 @@ use smol::{fs, io::AsyncReadExt};
|
||||
use settings::{Settings, SettingsSources, SettingsStore};
|
||||
use smol::{fs::File, process::Command};
|
||||
|
||||
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use http_client::{HttpClient, HttpClientWithUrl};
|
||||
use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
|
||||
use std::{
|
||||
env::{
|
||||
@@ -130,7 +131,7 @@ impl Settings for AutoUpdateSetting {
|
||||
type FileContent = Option<AutoUpdateSettingContent>;
|
||||
|
||||
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
|
||||
let auto_update = [sources.server, sources.release_channel, sources.user]
|
||||
let auto_update = [sources.release_channel, sources.user]
|
||||
.into_iter()
|
||||
.find_map(|value| value.copied().flatten())
|
||||
.unwrap_or(sources.default.ok_or_else(Self::missing_default)?);
|
||||
@@ -243,44 +244,29 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) -> Option<(
|
||||
let auto_updater = AutoUpdater::get(cx)?;
|
||||
let release_channel = ReleaseChannel::try_global(cx)?;
|
||||
|
||||
match release_channel {
|
||||
ReleaseChannel::Stable | ReleaseChannel::Preview => {
|
||||
let auto_updater = auto_updater.read(cx);
|
||||
let current_version = auto_updater.current_version;
|
||||
let release_channel = release_channel.dev_name();
|
||||
let path = format!("/releases/{release_channel}/{current_version}");
|
||||
let url = &auto_updater.http_client.build_url(&path);
|
||||
cx.open_url(url);
|
||||
}
|
||||
ReleaseChannel::Nightly => {
|
||||
cx.open_url("https://github.com/zed-industries/zed/commits/nightly/");
|
||||
}
|
||||
ReleaseChannel::Dev => {
|
||||
cx.open_url("https://github.com/zed-industries/zed/commits/main/");
|
||||
}
|
||||
if matches!(
|
||||
release_channel,
|
||||
ReleaseChannel::Stable | ReleaseChannel::Preview
|
||||
) {
|
||||
let auto_updater = auto_updater.read(cx);
|
||||
let release_channel = release_channel.dev_name();
|
||||
let current_version = auto_updater.current_version;
|
||||
let url = &auto_updater
|
||||
.http_client
|
||||
.build_url(&format!("/releases/{release_channel}/{current_version}"));
|
||||
cx.open_url(url);
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) {
|
||||
let release_channel = ReleaseChannel::global(cx);
|
||||
|
||||
let url = match release_channel {
|
||||
ReleaseChannel::Nightly => Some("https://github.com/zed-industries/zed/commits/nightly/"),
|
||||
ReleaseChannel::Dev => Some("https://github.com/zed-industries/zed/commits/main/"),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
if let Some(url) = url {
|
||||
cx.open_url(url);
|
||||
return;
|
||||
}
|
||||
|
||||
let version = AppVersion::global(cx).to_string();
|
||||
|
||||
let client = client::Client::global(cx).http_client();
|
||||
let url = client.build_url(&format!(
|
||||
"/api/release_notes/v2/{}/{}",
|
||||
"/api/release_notes/{}/{}",
|
||||
release_channel.dev_name(),
|
||||
version
|
||||
));
|
||||
@@ -357,17 +343,15 @@ pub fn notify_of_any_new_update(cx: &mut ViewContext<Workspace>) -> Option<()> {
|
||||
let should_show_notification = should_show_notification.await?;
|
||||
if should_show_notification {
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
let workspace_handle = workspace.weak_handle();
|
||||
workspace.show_notification(
|
||||
NotificationId::unique::<UpdateNotification>(),
|
||||
cx,
|
||||
|cx| cx.new_view(|_| UpdateNotification::new(version, workspace_handle)),
|
||||
|cx| cx.new_view(|_| UpdateNotification::new(version)),
|
||||
);
|
||||
updater.update(cx, |updater, cx| {
|
||||
updater
|
||||
.set_should_show_update_notification(false, cx)
|
||||
.detach_and_log_err(cx);
|
||||
});
|
||||
updater
|
||||
.read(cx)
|
||||
.set_should_show_update_notification(false, cx)
|
||||
.detach_and_log_err(cx);
|
||||
})?;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
@@ -464,7 +448,6 @@ impl AutoUpdater {
|
||||
smol::fs::create_dir_all(&platform_dir).await.ok();
|
||||
|
||||
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
|
||||
|
||||
if smol::fs::metadata(&version_path).await.is_err() {
|
||||
log::info!("downloading zed-remote-server {os} {arch}");
|
||||
download_remote_server_binary(&version_path, release, client, cx).await?;
|
||||
|
||||
@@ -1,18 +1,13 @@
|
||||
use gpui::{
|
||||
div, DismissEvent, EventEmitter, InteractiveElement, IntoElement, ParentElement, Render,
|
||||
SemanticVersion, StatefulInteractiveElement, Styled, ViewContext, WeakView,
|
||||
SemanticVersion, StatefulInteractiveElement, Styled, ViewContext,
|
||||
};
|
||||
use menu::Cancel;
|
||||
use release_channel::ReleaseChannel;
|
||||
use util::ResultExt;
|
||||
use workspace::{
|
||||
ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt},
|
||||
Workspace,
|
||||
};
|
||||
use workspace::ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt};
|
||||
|
||||
pub struct UpdateNotification {
|
||||
version: SemanticVersion,
|
||||
workspace: WeakView<Workspace>,
|
||||
}
|
||||
|
||||
impl EventEmitter<DismissEvent> for UpdateNotification {}
|
||||
@@ -46,11 +41,7 @@ impl Render for UpdateNotification {
|
||||
.child(Label::new("View the release notes"))
|
||||
.cursor_pointer()
|
||||
.on_click(cx.listener(|this, _, cx| {
|
||||
this.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
crate::view_release_notes_locally(workspace, cx);
|
||||
})
|
||||
.log_err();
|
||||
crate::view_release_notes(&Default::default(), cx);
|
||||
this.dismiss(&menu::Cancel, cx)
|
||||
})),
|
||||
)
|
||||
@@ -58,8 +49,8 @@ impl Render for UpdateNotification {
|
||||
}
|
||||
|
||||
impl UpdateNotification {
|
||||
pub fn new(version: SemanticVersion, workspace: WeakView<Workspace>) -> Self {
|
||||
Self { version, workspace }
|
||||
pub fn new(version: SemanticVersion) -> Self {
|
||||
Self { version }
|
||||
}
|
||||
|
||||
pub fn dismiss(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use editor::Editor;
|
||||
use gpui::{
|
||||
Element, EventEmitter, FocusableView, IntoElement, ParentElement, Render, StyledText,
|
||||
Subscription, ViewContext,
|
||||
Element, EventEmitter, IntoElement, ParentElement, Render, StyledText, Subscription,
|
||||
ViewContext,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use std::cmp;
|
||||
@@ -90,30 +90,17 @@ impl Render for Breadcrumbs {
|
||||
ButtonLike::new("toggle outline view")
|
||||
.child(breadcrumbs_stack)
|
||||
.style(ButtonStyle::Transparent)
|
||||
.on_click({
|
||||
let editor = editor.clone();
|
||||
move |_, cx| {
|
||||
if let Some(editor) = editor.upgrade() {
|
||||
outline::toggle(editor, &editor::actions::ToggleOutline, cx)
|
||||
}
|
||||
.on_click(move |_, cx| {
|
||||
if let Some(editor) = editor.upgrade() {
|
||||
outline::toggle(editor, &editor::actions::ToggleOutline, cx)
|
||||
}
|
||||
})
|
||||
.tooltip(move |cx| {
|
||||
if let Some(editor) = editor.upgrade() {
|
||||
let focus_handle = editor.read(cx).focus_handle(cx);
|
||||
Tooltip::for_action_in(
|
||||
"Show symbol outline",
|
||||
&editor::actions::ToggleOutline,
|
||||
&focus_handle,
|
||||
cx,
|
||||
)
|
||||
} else {
|
||||
Tooltip::for_action(
|
||||
"Show symbol outline",
|
||||
&editor::actions::ToggleOutline,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
.tooltip(|cx| {
|
||||
Tooltip::for_action(
|
||||
"Show symbol outline",
|
||||
&editor::actions::ToggleOutline,
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
),
|
||||
None => element
|
||||
|
||||
@@ -1178,7 +1178,7 @@ impl Room {
this.update(&mut cx, |this, cx| {
this.joined_projects.retain(|project| {
if let Some(project) = project.upgrade() {
!project.read(cx).is_disconnected(cx)
!project.read(cx).is_disconnected()
} else {
false
}
@@ -1200,7 +1200,6 @@ impl Room {
room_id: self.id(),
worktrees: vec![],
dev_server_project_id: Some(dev_server_project_id.0),
is_ssh_project: false,
})
} else {
if let Some(project_id) = project.read(cx).remote_id() {
@@ -1211,7 +1210,6 @@ impl Room {
room_id: self.id(),
worktrees: project.read(cx).worktree_metadata_protos(cx),
dev_server_project_id: None,
is_ssh_project: project.read(cx).is_via_ssh(),
})
};

@@ -5,8 +5,8 @@ use collections::HashMap;
use gpui::{AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task};
use language::proto::serialize_version;
use rpc::{
proto::{self, PeerId},
AnyProtoClient, TypedEnvelope,
proto::{self, AnyProtoClient, PeerId},
TypedEnvelope,
};
use std::{sync::Arc, time::Duration};
use text::BufferId;
@@ -66,7 +66,7 @@ impl ChannelBuffer {
let capability = channel_store.read(cx).channel_capability(channel.id);
language::Buffer::remote(buffer_id, response.replica_id as u16, capability, base_text)
})?;
buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))??;

let subscription = client.subscribe_to_entity(channel.id.0)?;

@@ -151,7 +151,7 @@ impl ChannelBuffer {
cx.notify();
this.buffer
.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))
})?;
})??;

Ok(())
}
@@ -171,14 +171,11 @@ impl ChannelBuffer {
fn on_buffer_update(
&mut self,
_: Model<language::Buffer>,
event: &language::BufferEvent,
event: &language::Event,
cx: &mut ModelContext<Self>,
) {
match event {
language::BufferEvent::Operation {
operation,
is_local: true,
} => {
language::Event::Operation(operation) => {
if *ZED_ALWAYS_ACTIVE {
if let language::Operation::UpdateSelections { selections, .. } = operation {
if selections.is_empty() {
@@ -194,7 +191,7 @@ impl ChannelBuffer {
})
.log_err();
}
language::BufferEvent::Edited => {
language::Event::Edited => {
cx.emit(ChannelBufferEvent::BufferEdited);
}
_ => {}

@@ -11,7 +11,7 @@ use gpui::{
AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task, WeakModel,
};
use rand::prelude::*;
use rpc::AnyProtoClient;
use rpc::proto::AnyProtoClient;
use std::{
ops::{ControlFlow, Range},
sync::Arc,
@@ -332,7 +332,7 @@ impl ChannelChat {
.update(&mut cx, |chat, cx| {
if let Some(first_id) = chat.first_loaded_message_id() {
if first_id <= message_id {
let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(&());
let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>();
let message_id = ChannelMessageId::Saved(message_id);
cursor.seek(&message_id, Bias::Left, &());
return ControlFlow::Break(
@@ -498,7 +498,7 @@ impl ChannelChat {
}

pub fn message(&self, ix: usize) -> &ChannelMessage {
let mut cursor = self.messages.cursor::<Count>(&());
let mut cursor = self.messages.cursor::<Count>();
cursor.seek(&Count(ix), Bias::Right, &());
cursor.item().unwrap()
}
@@ -515,13 +515,13 @@ impl ChannelChat {
}

pub fn messages_in_range(&self, range: Range<usize>) -> impl Iterator<Item = &ChannelMessage> {
let mut cursor = self.messages.cursor::<Count>(&());
let mut cursor = self.messages.cursor::<Count>();
cursor.seek(&Count(range.start), Bias::Right, &());
cursor.take(range.len())
}

pub fn pending_messages(&self) -> impl Iterator<Item = &ChannelMessage> {
let mut cursor = self.messages.cursor::<ChannelMessageId>(&());
let mut cursor = self.messages.cursor::<ChannelMessageId>();
cursor.seek(&ChannelMessageId::Pending(0), Bias::Left, &());
cursor
}
@@ -589,11 +589,11 @@ impl ChannelChat {
fn insert_messages(&mut self, messages: SumTree<ChannelMessage>, cx: &mut ModelContext<Self>) {
if let Some((first_message, last_message)) = messages.first().zip(messages.last()) {
let nonces = messages
.cursor::<()>(&())
.cursor::<()>()
.map(|m| m.nonce)
.collect::<HashSet<_>>();

let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(&());
let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>();
let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left, &());
let start_ix = old_cursor.start().1 .0;
let removed_messages = old_cursor.slice(&last_message.id, Bias::Right, &());
@@ -646,7 +646,7 @@ impl ChannelChat {
}

fn message_removed(&mut self, id: u64, cx: &mut ModelContext<Self>) {
let mut cursor = self.messages.cursor::<ChannelMessageId>(&());
let mut cursor = self.messages.cursor::<ChannelMessageId>();
let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left, &());
if let Some(item) = cursor.item() {
if item.id == ChannelMessageId::Saved(id) {
@@ -685,7 +685,7 @@ impl ChannelChat {
edited_at: Option<OffsetDateTime>,
cx: &mut ModelContext<Self>,
) {
let mut cursor = self.messages.cursor::<ChannelMessageId>(&());
let mut cursor = self.messages.cursor::<ChannelMessageId>();
let mut messages = cursor.slice(&id, Bias::Left, &());
let ix = messages.summary().count;

@@ -716,7 +716,7 @@ async fn messages_from_proto(
cx: &mut AsyncAppContext,
) -> Result<SumTree<ChannelMessage>> {
let messages = ChannelMessage::from_proto_vec(proto_messages, user_store, cx).await?;
let mut result = SumTree::default();
let mut result = SumTree::new();
result.extend(messages, &());
Ok(result)
}
@@ -808,7 +808,7 @@ pub fn mentions_to_proto(mentions: &[(Range<usize>, UserId)]) -> Vec<proto::Chat
impl sum_tree::Item for ChannelMessage {
type Summary = ChannelMessageSummary;

fn summary(&self, _cx: &()) -> Self::Summary {
fn summary(&self) -> Self::Summary {
ChannelMessageSummary {
max_id: self.id,
count: 1,
@@ -825,10 +825,6 @@ impl Default for ChannelMessageId {
impl sum_tree::Summary for ChannelMessageSummary {
type Context = ();

fn zero(_cx: &Self::Context) -> Self {
Default::default()
}

fn add_summary(&mut self, summary: &Self, _: &()) {
self.max_id = summary.max_id;
self.count += summary.count;
@@ -836,10 +832,6 @@ impl sum_tree::Summary for ChannelMessageSummary {
}

impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for ChannelMessageId {
fn zero(_cx: &()) -> Self {
Default::default()
}

fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) {
debug_assert!(summary.max_id > *self);
*self = summary.max_id;
@@ -847,10 +839,6 @@ impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for ChannelMessageId {
}

impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for Count {
fn zero(_cx: &()) -> Self {
Default::default()
}

fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) {
self.0 += summary.count;
}

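The `sum_tree` hunks above thread an explicit context argument through `Item::summary`, `Summary::zero`, and `Dimension::zero`, and through `cursor::<T>(&())` at call sites. A reduced, self-contained mock of that trait shape (not the real `sum_tree` crate; the trait and `Count` type below are simplified stand-ins showing where the `&()` context flows):

```rust
// Simplified stand-ins for the sum_tree traits touched in the hunks above.
trait Summary {
    type Context;
    fn zero(cx: &Self::Context) -> Self;
    fn add_summary(&mut self, other: &Self, cx: &Self::Context);
}

#[derive(Default, Clone, Copy, Debug, PartialEq)]
struct Count(usize);

impl Summary for Count {
    // The channel-chat summaries carry no extra state, so the context is `()`.
    type Context = ();

    fn zero(_cx: &()) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, other: &Self, _cx: &()) {
        self.0 += other.0;
    }
}

fn main() {
    let mut total = Count::zero(&());
    for item in [Count(1), Count(1), Count(1)] {
        total.add_summary(&item, &());
    }
    assert_eq!(total, Count(3));
    println!("{total:?}");
}
```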
@@ -1007,7 +1007,7 @@ impl ChannelStore {
.into_iter()
.map(language::proto::deserialize_operation)
.collect::<Result<Vec<_>>>()?;
buffer.apply_ops(incoming_operations, cx);
buffer.apply_ops(incoming_operations, cx)?;
anyhow::Ok(outgoing_operations)
})
.log_err();

@@ -58,32 +58,27 @@ struct Args {
dev_server_token: Option<String>,
}

fn parse_path_with_position(argument_str: &str) -> anyhow::Result<String> {
let canonicalized = match Path::new(argument_str).canonicalize() {
Ok(existing_path) => PathWithPosition::from_path(existing_path),
Err(_) => {
let path = PathWithPosition::parse_str(argument_str);
let curdir = env::current_dir().context("reteiving current directory")?;
path.map_path(|path| match fs::canonicalize(&path) {
Ok(path) => Ok(path),
Err(e) => {
if let Some(mut parent) = path.parent() {
if parent == Path::new("") {
parent = &curdir
}
match fs::canonicalize(parent) {
Ok(parent) => Ok(parent.join(path.file_name().unwrap())),
Err(_) => Err(e),
}
} else {
Err(e)
}
fn parse_path_with_position(argument_str: &str) -> Result<String, std::io::Error> {
let path = PathWithPosition::parse_str(argument_str);
let curdir = env::current_dir()?;

let canonicalized = path.map_path(|path| match fs::canonicalize(&path) {
Ok(path) => Ok(path),
Err(e) => {
if let Some(mut parent) = path.parent() {
if parent == Path::new("") {
parent = &curdir
}
})
match fs::canonicalize(parent) {
Ok(parent) => Ok(parent.join(path.file_name().unwrap())),
Err(_) => Err(e),
}
} else {
Err(e)
}
}
.with_context(|| format!("parsing as path with position {argument_str}"))?,
};
Ok(canonicalized.to_string(|path| path.to_string_lossy().to_string()))
})?;
Ok(canonicalized.to_string(|path| path.display().to_string()))
}

fn main() -> Result<()> {

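The rewritten `parse_path_with_position` above canonicalizes a path that may not exist yet by falling back to canonicalizing its parent directory and re-attaching the file name. A standalone sketch of just that std-level fallback; `PathWithPosition` and the row:column parsing live in zed's `util` crate and are omitted here:

```rust
use std::env;
use std::fs;
use std::io;
use std::path::{Path, PathBuf};

/// Canonicalize `path`, or, if it does not exist, canonicalize its parent and
/// re-attach the file name so paths to not-yet-created files still resolve.
fn canonicalize_existing_or_parent(path: &Path) -> io::Result<PathBuf> {
    match fs::canonicalize(path) {
        Ok(resolved) => Ok(resolved),
        Err(err) => {
            let curdir = env::current_dir()?;
            let parent = match path.parent() {
                // A bare file name has an empty parent; resolve it against the cwd.
                Some(p) if p == Path::new("") => curdir.as_path(),
                Some(p) => p,
                None => return Err(err),
            };
            match (fs::canonicalize(parent), path.file_name()) {
                (Ok(parent), Some(name)) => Ok(parent.join(name)),
                _ => Err(err),
            }
        }
    }
}

fn main() -> io::Result<()> {
    let resolved = canonicalize_existing_or_parent(Path::new("does_not_exist.rs"))?;
    println!("{}", resolved.display());
    Ok(())
}
```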
@@ -18,11 +18,12 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup
[dependencies]
anyhow.workspace = true
async-recursion = "0.3"
async-tungstenite = { workspace = true, features = ["async-std", "async-tls"] }
async-tungstenite = { workspace = true, features = ["async-std", "async-native-tls"] }
chrono = { workspace = true, features = ["serde"] }
clock.workspace = true
collections.workspace = true
feature_flags.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true
http_client.workspace = true
@@ -33,9 +34,7 @@ parking_lot.workspace = true
postage.workspace = true
rand.workspace = true
release_channel.workspace = true
rpc = { workspace = true, features = ["gpui"] }
rustls-native-certs.workspace = true
rustls.workspace = true
rpc.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true

@@ -22,6 +22,7 @@ use gpui::{actions, AppContext, AsyncAppContext, Global, Model, Task, WeakModel}
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
use parking_lot::RwLock;
use postage::watch;
use proto::{AnyProtoClient, EntityMessageSubscriber, ProtoClient, ProtoMessageHandlerSet};
use rand::prelude::*;
use release_channel::{AppVersion, ReleaseChannel};
use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage};
@@ -141,7 +142,6 @@ impl Settings for ProxySettings {
Ok(Self {
proxy: sources
.user
.or(sources.server)
.and_then(|value| value.proxy.clone())
.or(sources.default.proxy.clone()),
})
@@ -241,6 +241,8 @@ pub enum EstablishConnectionError {
#[error("{0}")]
Other(#[from] anyhow::Error),
#[error("{0}")]
Http(#[from] http_client::Error),
#[error("{0}")]
InvalidHeaderValue(#[from] async_tungstenite::tungstenite::http::header::InvalidHeaderValue),
#[error("{0}")]
Io(#[from] std::io::Error),
@@ -395,7 +397,7 @@ pub struct PendingEntitySubscription<T: 'static> {
}

impl<T: 'static> PendingEntitySubscription<T> {
pub fn set_model(mut self, model: &Model<T>, cx: &AsyncAppContext) -> Subscription {
pub fn set_model(mut self, model: &Model<T>, cx: &mut AsyncAppContext) -> Subscription {
self.consumed = true;
let mut handlers = self.client.handler_set.lock();
let id = (TypeId::of::<T>(), self.remote_id);
@@ -473,21 +475,15 @@ impl settings::Settings for TelemetrySettings {

fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
Ok(Self {
diagnostics: sources
.user
.as_ref()
.or(sources.server.as_ref())
.and_then(|v| v.diagnostics)
.unwrap_or(
sources
.default
.diagnostics
.ok_or_else(Self::missing_default)?,
),
diagnostics: sources.user.as_ref().and_then(|v| v.diagnostics).unwrap_or(
sources
.default
.diagnostics
.ok_or_else(Self::missing_default)?,
),
metrics: sources
.user
.as_ref()
.or(sources.server.as_ref())
.and_then(|v| v.metrics)
.unwrap_or(sources.default.metrics.ok_or_else(Self::missing_default)?),
})
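The `TelemetrySettings::load` change above inserts a server-provided layer between the user value and the packaged default. The precedence reduces to plain `Option` chaining; a minimal standalone sketch with hypothetical values:

```rust
fn main() {
    // Hypothetical layered values: user settings win, then server settings,
    // then the packaged default, mirroring `user.or(server).unwrap_or(default)`.
    let user: Option<bool> = None;
    let server: Option<bool> = Some(false);
    let default = true;

    let diagnostics = user.or(server).unwrap_or(default);
    assert!(!diagnostics);
    println!("diagnostics = {diagnostics}");
}
```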
@@ -534,13 +530,19 @@ impl Client {
}

pub fn production(cx: &mut AppContext) -> Arc<Self> {
let user_agent = format!(
"Zed/{} ({}; {})",
AppVersion::global(cx),
std::env::consts::OS,
std::env::consts::ARCH
);
let clock = Arc::new(clock::RealSystemClock);
let http = Arc::new(HttpClientWithUrl::new_uri(
cx.http_client(),
let http = Arc::new(HttpClientWithUrl::new(
&ClientSettings::get_global(cx).server_url,
cx.http_client().proxy().cloned(),
Some(user_agent),
ProxySettings::get_global(cx).proxy.clone(),
));
Self::new(clock, http, cx)
Self::new(clock, http.clone(), cx)
}

pub fn id(&self) -> u64 {
@@ -1030,7 +1032,7 @@ impl Client {
&self,
http: Arc<HttpClientWithUrl>,
release_channel: Option<ReleaseChannel>,
) -> impl Future<Output = Result<url::Url>> {
) -> impl Future<Output = Result<Url>> {
#[cfg(any(test, feature = "test-support"))]
let url_override = self.rpc_url.read().clone();

@@ -1124,7 +1126,7 @@ impl Client {
// for us from the RPC URL.
//
// Among other things, it will generate and set a `Sec-WebSocket-Key` header for us.
let mut request = IntoClientRequest::into_client_request(rpc_url.as_str())?;
let mut request = rpc_url.into_client_request()?;

// We then modify the request to add our desired headers.
let request_headers = request.headers_mut();
@@ -1144,33 +1146,8 @@ impl Client {

match url_scheme {
Https => {
let client_config = {
let mut root_store = rustls::RootCertStore::empty();

let root_certs = rustls_native_certs::load_native_certs();
for error in root_certs.errors {
log::warn!("error loading native certs: {:?}", error);
}
root_store.add_parsable_certificates(
&root_certs
.certs
.into_iter()
.map(|cert| cert.as_ref().to_owned())
.collect::<Vec<_>>(),
);
rustls::ClientConfig::builder()
.with_safe_defaults()
.with_root_certificates(root_store)
.with_no_client_auth()
};

let (stream, _) =
async_tungstenite::async_tls::client_async_tls_with_connector(
request,
stream,
Some(client_config.into()),
)
.await?;
async_tungstenite::async_std::client_async_tls(request, stream).await?;
Ok(Connection::new(
stream
.map_err(|error| anyhow!(error))
@@ -1629,10 +1606,6 @@ impl ProtoClient for Client {
fn message_handler_set(&self) -> &parking_lot::Mutex<ProtoMessageHandlerSet> {
&self.handler_set
}

fn is_via_collab(&self) -> bool {
true
}
}

#[derive(Serialize, Deserialize)]
@@ -1760,7 +1733,7 @@ impl CredentialsProvider for KeychainCredentialsProvider {
}

/// prefix for the zed:// url scheme
pub const ZED_URL_SCHEME: &str = "zed";
pub static ZED_URL_SCHEME: &str = "zed";

/// Parses the given link into a Zed link.
///

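The switch from `pub const ZED_URL_SCHEME` to `pub static ZED_URL_SCHEME` is behavior-neutral for callers: for a `&'static str`, a `const` is inlined at each use site while a `static` names a single location. A tiny sketch of the two declarations side by side:

```rust
// `const` is copied into every use site; `static` is one named place in memory.
pub const SCHEME_CONST: &str = "zed";
pub static SCHEME_STATIC: &str = "zed";

fn main() {
    assert_eq!(SCHEME_CONST, SCHEME_STATIC);
    println!("{SCHEME_CONST} {SCHEME_STATIC}");
}
```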
@@ -1,13 +1,12 @@
mod event_coalescer;

use crate::{ChannelId, TelemetrySettings};
use anyhow::Result;
use chrono::{DateTime, Utc};
use clock::SystemClock;
use collections::{HashMap, HashSet};
use futures::Future;
use gpui::{AppContext, BackgroundExecutor, Task};
use http_client::{self, AsyncBody, HttpClient, HttpClientWithUrl, Method, Request};
use http_client::{self, HttpClient, HttpClientWithUrl, Method};
use once_cell::sync::Lazy;
use parking_lot::Mutex;
use release_channel::ReleaseChannel;
@@ -17,9 +16,9 @@ use std::io::Write;
use std::{env, mem, path::PathBuf, sync::Arc, time::Duration};
use sysinfo::{CpuRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System};
use telemetry_events::{
ActionEvent, AppEvent, AssistantEvent, CallEvent, CpuEvent, EditEvent, EditorEvent, Event,
EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, ReplEvent,
SettingEvent,
ActionEvent, AppEvent, AssistantEvent, AssistantKind, CallEvent, CpuEvent, EditEvent,
EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent,
MemoryEvent, ReplEvent, SettingEvent,
};
use tempfile::NamedTempFile;
#[cfg(not(debug_assertions))]
@@ -38,10 +37,9 @@ pub struct Telemetry {

struct TelemetryState {
settings: TelemetrySettings,
system_id: Option<Arc<str>>, // Per system
metrics_id: Option<Arc<str>>, // Per logged-in user
installation_id: Option<Arc<str>>, // Per app installation (different for dev, nightly, preview, and stable)
session_id: Option<String>, // Per app launch
metrics_id: Option<Arc<str>>, // Per logged-in user
release_channel: Option<&'static str>,
architecture: &'static str,
events_queue: Vec<EventWrapper>,
@@ -193,10 +191,9 @@ impl Telemetry {
settings: *TelemetrySettings::get_global(cx),
architecture: env::consts::ARCH,
release_channel,
system_id: None,
installation_id: None,
session_id: None,
metrics_id: None,
session_id: None,
events_queue: Vec::new(),
flush_events_task: None,
log_file: None,
@@ -286,13 +283,11 @@ impl Telemetry {

pub fn start(
self: &Arc<Self>,
system_id: Option<String>,
installation_id: Option<String>,
session_id: String,
cx: &AppContext,
cx: &mut AppContext,
) {
let mut state = self.state.lock();
state.system_id = system_id.map(|id| id.into());
state.installation_id = installation_id.map(|id| id.into());
state.session_id = Some(session_id);
state.app_version = release_channel::AppVersion::global(cx).to_string();
@@ -309,10 +304,7 @@ impl Telemetry {

let refresh_kind = ProcessRefreshKind::new().with_cpu().with_memory();
let current_process = Pid::from_u32(std::process::id());
system.refresh_processes_specifics(
sysinfo::ProcessesToUpdate::Some(&[current_process]),
refresh_kind,
);
system.refresh_process_specifics(current_process, refresh_kind);

// Waiting some amount of time before the first query is important to get a reasonable value
// https://docs.rs/sysinfo/0.29.10/sysinfo/trait.ProcessExt.html#tymethod.cpu_usage
@@ -322,10 +314,7 @@ impl Telemetry {
smol::Timer::after(DURATION_BETWEEN_SYSTEM_EVENTS).await;

let current_process = Pid::from_u32(std::process::id());
system.refresh_processes_specifics(
sysinfo::ProcessesToUpdate::Some(&[current_process]),
refresh_kind,
);
system.refresh_process_specifics(current_process, refresh_kind);
let Some(process) = system.process(current_process) else {
log::error!(
"Failed to find own process {current_process:?} in system process table"
@@ -365,7 +354,6 @@ impl Telemetry {
operation: &'static str,
copilot_enabled: bool,
copilot_enabled_for_language: bool,
is_via_ssh: bool,
) {
let event = Event::Editor(EditorEvent {
file_extension,
@@ -373,7 +361,6 @@ impl Telemetry {
operation: operation.into(),
copilot_enabled,
copilot_enabled_for_language,
is_via_ssh,
});

self.report_event(event)
@@ -394,8 +381,23 @@ impl Telemetry {
self.report_event(event)
}

pub fn report_assistant_event(self: &Arc<Self>, event: AssistantEvent) {
self.report_event(Event::Assistant(event));
pub fn report_assistant_event(
self: &Arc<Self>,
conversation_id: Option<String>,
kind: AssistantKind,
model: String,
response_latency: Option<Duration>,
error_message: Option<String>,
) {
let event = Event::Assistant(AssistantEvent {
conversation_id,
kind,
model: model.to_string(),
response_latency,
error_message,
});

self.report_event(event)
}

pub fn report_call_event(
@@ -459,7 +461,7 @@ impl Telemetry {
}))
}

pub fn log_edit_event(self: &Arc<Self>, environment: &'static str, is_via_ssh: bool) {
pub fn log_edit_event(self: &Arc<Self>, environment: &'static str) {
let mut state = self.state.lock();
let period_data = state.event_coalescer.log_event(environment);
drop(state);
@@ -468,7 +470,6 @@ impl Telemetry {
let event = Event::Edit(EditEvent {
duration: end.timestamp_millis() - start.timestamp_millis(),
environment: environment.to_string(),
is_via_ssh,
});

self.report_event(event);
@@ -489,7 +490,7 @@ impl Telemetry {
worktree_id: WorktreeId,
updated_entries_set: &UpdatedEntriesSet,
) {
let project_type_names: Vec<String> = {
let project_names: Vec<String> = {
let mut state = self.state.lock();
state
.worktree_id_map
@@ -525,8 +526,8 @@ impl Telemetry {
};

// Done on purpose to avoid calling `self.state.lock()` multiple times
for project_type_name in project_type_names {
self.report_app_event(format!("open {} project", project_type_name));
for project_name in project_names {
self.report_app_event(format!("open {} project", project_name));
}
}

@@ -598,29 +599,6 @@ impl Telemetry {
self.state.lock().is_staff
}

fn build_request(
self: &Arc<Self>,
// We take in the JSON bytes buffer so we can reuse the existing allocation.
mut json_bytes: Vec<u8>,
event_request: EventRequestBody,
) -> Result<Request<AsyncBody>> {
json_bytes.clear();
serde_json::to_writer(&mut json_bytes, &event_request)?;

let checksum = calculate_json_checksum(&json_bytes).unwrap_or("".to_string());

Ok(Request::builder()
.method(Method::POST)
.uri(
self.http_client
.build_zed_api_url("/telemetry/events", &[])?
.as_ref(),
)
.header("Content-Type", "application/json")
.header("x-zed-checksum", checksum)
.body(json_bytes.into())?)
}

pub fn flush_events(self: &Arc<Self>) {
let mut state = self.state.lock();
state.first_event_date_time = None;
@@ -647,14 +625,13 @@ impl Telemetry {
}
}

let request_body = {
{
let state = this.state.lock();

EventRequestBody {
system_id: state.system_id.as_deref().map(Into::into),
let request_body = EventRequestBody {
installation_id: state.installation_id.as_deref().map(Into::into),
session_id: state.session_id.clone(),
metrics_id: state.metrics_id.as_deref().map(Into::into),
session_id: state.session_id.clone(),
is_staff: state.is_staff,
app_version: state.app_version.clone(),
os_name: state.os_name.clone(),
@@ -663,11 +640,25 @@ impl Telemetry {

release_channel: state.release_channel.map(Into::into),
events,
}
};
};
json_bytes.clear();
serde_json::to_writer(&mut json_bytes, &request_body)?;
}

let request = this.build_request(json_bytes, request_body)?;
let response = this.http_client.send(request).await?;
let checksum = calculate_json_checksum(&json_bytes).unwrap_or("".to_string());

let request = http_client::Request::builder()
.method(Method::POST)
.uri(
this.http_client
.build_zed_api_url("/telemetry/events", &[])?
.as_ref(),
)
.header("Content-Type", "text/plain")
.header("x-zed-checksum", checksum)
.body(json_bytes.into());

let response = this.http_client.send(request?).await?;
if response.status() != 200 {
log::error!("Failed to send events: HTTP {:?}", response.status());
}
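Both versions of the flush path above serialize the queued events into a reused `Vec<u8>` before attaching a checksum header and posting the bytes. A minimal sketch of the buffer-reuse part, assuming only the `serde_json` crate (the checksum and `http_client` pieces are zed-internal and omitted):

```rust
use serde_json::json;

fn main() -> serde_json::Result<()> {
    // Reuse one buffer across flushes instead of allocating a fresh Vec each time.
    let mut json_bytes: Vec<u8> = Vec::new();

    for batch in 0..3 {
        let request_body = json!({
            "session_id": "session_id",
            "events": [{ "kind": "app", "batch": batch }],
        });

        json_bytes.clear();
        serde_json::to_writer(&mut json_bytes, &request_body)?;

        // In the real code these bytes become the POST body and feed the checksum header.
        println!("batch {batch}: {} bytes", json_bytes.len());
    }
    Ok(())
}
```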
@@ -712,7 +703,6 @@ mod tests {
Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(),
));
let http = FakeHttpClient::with_200_response();
let system_id = Some("system_id".to_string());
let installation_id = Some("installation_id".to_string());
let session_id = "session_id".to_string();

@@ -720,7 +710,7 @@ mod tests {
let telemetry = Telemetry::new(clock.clone(), http, cx);

telemetry.state.lock().max_queue_size = 4;
telemetry.start(system_id, installation_id, session_id, cx);
telemetry.start(installation_id, session_id, cx);

assert!(is_empty_state(&telemetry));

@@ -798,14 +788,13 @@ mod tests {
Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(),
));
let http = FakeHttpClient::with_200_response();
let system_id = Some("system_id".to_string());
let installation_id = Some("installation_id".to_string());
let session_id = "session_id".to_string();

cx.update(|cx| {
let telemetry = Telemetry::new(clock.clone(), http, cx);
telemetry.state.lock().max_queue_size = 4;
telemetry.start(system_id, installation_id, session_id, cx);
telemetry.start(installation_id, session_id, cx);

assert!(is_empty_state(&telemetry));

@@ -138,7 +138,7 @@ enum UpdateContacts {
}

impl UserStore {
pub fn new(client: Arc<Client>, cx: &ModelContext<Self>) -> Self {
pub fn new(client: Arc<Client>, cx: &mut ModelContext<Self>) -> Self {
let (mut current_user_tx, current_user_rx) = watch::channel();
let (update_contacts_tx, mut update_contacts_rx) = mpsc::unbounded();
let rpc_subscriptions = vec.
#[derive(Clone, Default, Hash, Eq, PartialEq)]
pub struct Global {
values: SmallVec<[u32; 8]>,
local_branch_value: u32,
}
pub struct Global(SmallVec<[u32; 8]>);

impl Global {
pub fn new() -> Self {
@@ -38,51 +33,41 @@ impl Global {
}

pub fn get(&self, replica_id: ReplicaId) -> Seq {
if replica_id == LOCAL_BRANCH_REPLICA_ID {
self.local_branch_value
} else {
self.values.get(replica_id as usize).copied().unwrap_or(0) as Seq
}
self.0.get(replica_id as usize).copied().unwrap_or(0) as Seq
}

pub fn observe(&mut self, timestamp: Lamport) {
if timestamp.value > 0 {
if timestamp.replica_id == LOCAL_BRANCH_REPLICA_ID {
self.local_branch_value = cmp::max(self.local_branch_value, timestamp.value);
} else {
let new_len = timestamp.replica_id as usize + 1;
if new_len > self.values.len() {
self.values.resize(new_len, 0);
}

let entry = &mut self.values[timestamp.replica_id as usize];
*entry = cmp::max(*entry, timestamp.value);
let new_len = timestamp.replica_id as usize + 1;
if new_len > self.0.len() {
self.0.resize(new_len, 0);
}

let entry = &mut self.0[timestamp.replica_id as usize];
*entry = cmp::max(*entry, timestamp.value);
}
}

pub fn join(&mut self, other: &Self) {
if other.values.len() > self.values.len() {
self.values.resize(other.values.len(), 0);
if other.0.len() > self.0.len() {
self.0.resize(other.0.len(), 0);
}

for (left, right) in self.values.iter_mut().zip(&other.values) {
for (left, right) in self.0.iter_mut().zip(&other.0) {
*left = cmp::max(*left, *right);
}

self.local_branch_value = cmp::max(self.local_branch_value, other.local_branch_value);
}

pub fn meet(&mut self, other: &Self) {
if other.values.len() > self.values.len() {
self.values.resize(other.values.len(), 0);
if other.0.len() > self.0.len() {
self.0.resize(other.0.len(), 0);
}

let mut new_len = 0;
for (ix, (left, right)) in self
.values
.0
.iter_mut()
.zip(other.values.iter().chain(iter::repeat(&0)))
.zip(other.0.iter().chain(iter::repeat(&0)))
.enumerate()
{
if *left == 0 {
@@ -95,8 +80,7 @@ impl Global {
new_len = ix + 1;
}
}
self.values.resize(new_len, 0);
self.local_branch_value = cmp::min(self.local_branch_value, other.local_branch_value);
self.0.resize(new_len, 0);
}

pub fn observed(&self, timestamp: Lamport) -> bool {
@@ -104,44 +88,34 @@ impl Global {
}

pub fn observed_any(&self, other: &Self) -> bool {
self.values
self.0
.iter()
.zip(other.values.iter())
.zip(other.0.iter())
.any(|(left, right)| *right > 0 && left >= right)
|| (other.local_branch_value > 0 && self.local_branch_value >= other.local_branch_value)
}

pub fn observed_all(&self, other: &Self) -> bool {
let mut rhs = other.values.iter();
self.values.iter().all(|left| match rhs.next() {
let mut rhs = other.0.iter();
self.0.iter().all(|left| match rhs.next() {
Some(right) => left >= right,
None => true,
}) && rhs.next().is_none()
&& self.local_branch_value >= other.local_branch_value
}

pub fn changed_since(&self, other: &Self) -> bool {
self.values.len() > other.values.len()
self.0.len() > other.0.len()
|| self
.values
.0
.iter()
.zip(other.values.iter())
.zip(other.0.iter())
.any(|(left, right)| left > right)
|| self.local_branch_value > other.local_branch_value
}

pub fn iter(&self) -> impl Iterator<Item = Lamport> + '_ {
self.values
.iter()
.enumerate()
.map(|(replica_id, seq)| Lamport {
replica_id: replica_id as ReplicaId,
value: *seq,
})
.chain((self.local_branch_value > 0).then_some(Lamport {
replica_id: LOCAL_BRANCH_REPLICA_ID,
value: self.local_branch_value,
}))
self.0.iter().enumerate().map(|(replica_id, seq)| Lamport {
replica_id: replica_id as ReplicaId,
value: *seq,
})
}
}

@@ -216,11 +190,7 @@ impl fmt::Debug for Global {
if timestamp.replica_id > 0 {
write!(f, ", ")?;
}
if timestamp.replica_id == LOCAL_BRANCH_REPLICA_ID {
write!(f, "<branch>: {}", timestamp.value)?;
} else {
write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?;
}
write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?;
}
write!(f, "}}")
}

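The `Global` hunks above extend a per-replica vector clock with a dedicated `local_branch_value` slot. A standalone sketch of the core vector-clock operations (`observe` and `join`) over a plain `Vec<u32>`, without the branch slot or the crate's `SmallVec`:

```rust
type ReplicaId = u16;
type Seq = u32;

#[derive(Clone, Debug, Default, PartialEq)]
struct Global(Vec<Seq>);

impl Global {
    fn get(&self, replica_id: ReplicaId) -> Seq {
        self.0.get(replica_id as usize).copied().unwrap_or(0)
    }

    // Record that `value` has been seen from `replica_id`.
    fn observe(&mut self, replica_id: ReplicaId, value: Seq) {
        if value > 0 {
            let new_len = replica_id as usize + 1;
            if new_len > self.0.len() {
                self.0.resize(new_len, 0);
            }
            let entry = &mut self.0[replica_id as usize];
            *entry = (*entry).max(value);
        }
    }

    // Component-wise maximum: afterwards `self` has observed everything either side had.
    fn join(&mut self, other: &Self) {
        if other.0.len() > self.0.len() {
            self.0.resize(other.0.len(), 0);
        }
        for (left, right) in self.0.iter_mut().zip(&other.0) {
            *left = (*left).max(*right);
        }
    }
}

fn main() {
    let mut a = Global::default();
    let mut b = Global::default();
    a.observe(0, 3);
    b.observe(1, 5);
    a.join(&b);
    assert_eq!((a.get(0), a.get(1)), (3, 5));
    println!("{a:?}");
}
```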
@@ -28,11 +28,10 @@ axum = { version = "0.6", features = ["json", "headers", "ws"] }
axum-extra = { version = "0.4", features = ["erased-json"] }
base64.workspace = true
chrono.workspace = true
clickhouse.workspace = true
clock.workspace = true
clickhouse.workspace = true
collections.workspace = true
dashmap.workspace = true
derive_more.workspace = true
envy = "0.4.2"
futures.workspace = true
google_ai.workspace = true
@@ -43,14 +42,13 @@ live_kit_server.workspace = true
log.workspace = true
nanoid.workspace = true
open_ai.workspace = true
supermaven_api.workspace = true
parking_lot.workspace = true
prometheus = "0.13"
prost.workspace = true
rand.workspace = true
reqwest = { version = "0.11", features = ["json"] }
reqwest_client.workspace = true
rpc.workspace = true
rustc-demangle.workspace = true
scrypt = "0.11"
sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
semantic_version.workspace = true
@@ -62,12 +60,12 @@ sha2.workspace = true
sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
strum.workspace = true
subtle.workspace = true
supermaven_api.workspace = true
rustc-demangle.workspace = true
telemetry_events.workspace = true
text.workspace = true
thiserror.workspace = true
time.workspace = true
tokio = { workspace = true, features = ["full"] }
tokio.workspace = true
toml.workspace = true
tower = "0.4"
tower-http = { workspace = true, features = ["trace"] }
@@ -86,7 +84,6 @@ client = { workspace = true, features = ["test-support"] }
collab_ui = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
ctor.workspace = true
dev_server_projects.workspace = true
editor = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
file_finder.workspace = true
@@ -94,7 +91,6 @@ fs = { workspace = true, features = ["test-support"] }
git = { workspace = true, features = ["test-support"] }
git_hosting_providers.workspace = true
gpui = { workspace = true, features = ["test-support"] }
headless.workspace = true
hyper.workspace = true
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
@@ -111,6 +107,7 @@ recent_projects = { workspace = true }
release_channel.workspace = true
remote = { workspace = true, features = ["test-support"] }
remote_server.workspace = true
dev_server_projects.workspace = true
rpc = { workspace = true, features = ["test-support"] }
sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-sqlite"] }
serde_json.workspace = true
@@ -122,6 +119,7 @@ unindent.workspace = true
util.workspace = true
workspace = { workspace = true, features = ["test-support"] }
worktree = { workspace = true, features = ["test-support"] }
headless.workspace = true

[package.metadata.cargo-machete]
ignored = ["async-stripe"]

@@ -23,7 +23,8 @@ To use a different set of admin users, create `crates/collab/seed.json`.
```json
{
"admins": ["yourgithubhere"],
"channels": ["zed"]
"channels": ["zed"],
"number_of_users": 20
}
```

@@ -149,6 +149,16 @@ spec:
secretKeyRef:
name: google-ai
key: api_key
- name: RUNPOD_API_KEY
valueFrom:
secretKeyRef:
name: runpod
key: api_key
- name: RUNPOD_API_SUMMARY_URL
valueFrom:
secretKeyRef:
name: runpod
key: summary
- name: BLOB_STORE_ACCESS_KEY
valueFrom:
secretKeyRef:
@@ -199,12 +209,6 @@ spec:
secretKeyRef:
name: slack
key: panics_webhook
- name: STRIPE_API_KEY
valueFrom:
secretKeyRef:
name: stripe
key: api_key
optional: true
- name: COMPLETE_WITH_LANGUAGE_MODEL_RATE_LIMIT_PER_HOUR
value: "1000"
- name: SUPERMAVEN_ADMIN_API_KEY

@@ -112,7 +112,6 @@ CREATE TABLE "worktree_settings_files" (
"worktree_id" INTEGER NOT NULL,
"path" VARCHAR NOT NULL,
"content" TEXT,
"kind" VARCHAR,
PRIMARY KEY(project_id, worktree_id, path),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
);
@@ -422,15 +421,6 @@ CREATE TABLE dev_server_projects (
paths TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS billing_preferences (
id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
user_id INTEGER NOT NULL REFERENCES users(id),
max_monthly_llm_usage_spending_in_cents INTEGER NOT NULL
);

CREATE UNIQUE INDEX "uix_billing_preferences_on_user_id" ON billing_preferences (user_id);

CREATE TABLE IF NOT EXISTS billing_customers (
id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

@@ -1 +0,0 @@
ALTER TABLE "worktree_settings_files" ADD COLUMN "kind" VARCHAR;
@@ -1,8 +0,0 @@
create table if not exists billing_preferences (
id serial primary key,
created_at timestamp without time zone not null default now(),
user_id integer not null references users(id) on delete cascade,
max_monthly_llm_usage_spending_in_cents integer not null
);

create unique index "uix_billing_preferences_on_user_id" on billing_preferences (user_id);
@@ -1,11 +0,0 @@
alter table models
add column price_per_million_cache_creation_input_tokens integer not null default 0,
add column price_per_million_cache_read_input_tokens integer not null default 0;

alter table usages
add column cache_creation_input_tokens_this_month bigint not null default 0,
add column cache_read_input_tokens_this_month bigint not null default 0;

alter table lifetime_usages
add column cache_creation_input_tokens bigint not null default 0,
add column cache_read_input_tokens bigint not null default 0;
||||